diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/flow.py b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/flow.py
index 5e1141655e9c4afd513627ce7acca60c36211774..32ffa0c8daecf5b67a0ef262d310645e8ee7e177 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/flow.py
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/flow.py
@@ -2,22 +2,20 @@ import script
 
 def addSampleChildNodes(path, samplePermId, sampleType, response, acceptor, context):
     dataSets = script.getDataSetsOfSampleAndItsChildren(samplePermId, context)
+    filteredDataSets = []
     for dataSet in dataSets:
         if acceptor.acceptDataSet(dataSet):
-            dataSetCode = dataSet.getCode()
-            content = context.getContentProvider().asContent(dataSetCode)
-            contentNode = content.getRootNode()
-            script.addDataSetFileNodes(path, dataSetCode, contentNode, response, acceptor, context)
+            filteredDataSets.append(dataSet)
+    script.addDataSetFileNodesFor(path, filteredDataSets, response, acceptor, context)
 
 def addSampleChildNodesWithPlates(path, samplePermId, sampleType, response, acceptor, context):
     dataSets = script.getDataSetsOfSampleAndItsChildren(samplePermId, context)
+    filteredDataSets = []
     for dataSet in dataSets:
         sampleTypeCode = dataSet.getSample().getType().getCode()
         if not sampleTypeCode.endswith("_WELL"):
-            dataSetCode = dataSet.getCode()
-            content = context.getContentProvider().asContent(dataSetCode)
-            contentNode = content.getRootNode()
-            script.addDataSetFileNodes(path, dataSetCode, contentNode, response, acceptor, context)
+            filteredDataSets.append(dataSet)
+    script.addDataSetFileNodesFor(path, filteredDataSets, response, acceptor, context)
     script.addSampleSampleChildNodes(path, samplePermId, response, acceptor, context)
 
 for t in ["FACS_ARIA", "INFLUX", "MOFLO_XDP", "S3E", "SONY_SH800S", "SONY_MA900"]:
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/microscopy.py b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/microscopy.py
index 7429a9d6a6d34a90f0aff9aee79bca0a83e502e1..272ec466926079699eba16e5f3eaf48b0c93a8a8 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/microscopy.py
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/resolver-plugins/microscopy.py
@@ -7,11 +7,10 @@ acceptor.hideDataSetType("MICROSCOPY_IMG_THUMBNAIL")
 
 def addSampleChildNodes(path, samplePermId, sampleType, response, acceptor, context):
     dataSets = script.getDataSetsOfSampleAndItsChildren(samplePermId, context)
+    filteredDataSets = []
     for dataSet in dataSets:
         if acceptor.acceptDataSet(dataSet):
-            dataSetCode = dataSet.getCode()
-            content = context.getContentProvider().asContent(dataSetCode)
-            contentNode = content.getRootNode()
-            script.addDataSetFileNodes(path, dataSetCode, contentNode, response, acceptor, context)
+            filteredDataSets.append(dataSet)
+    script.addDataSetFileNodesFor(path, filteredDataSets, response, acceptor, context)
 
 acceptor.sampleChildrenHandlers["MICROSCOPY_EXPERIMENT"] = addSampleChildNodes
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/script.py b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/script.py
index c94b6c7577d646fe4fbe99f8cda459010996843b..fa1a17da033b270eae771c9e71f4c0c8785bdaf3 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/script.py
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/file-system-plugins/eln-tree/script.py
@@ -15,6 +15,10 @@ from ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.search import SampleSearchC
 from ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions import SampleFetchOptions
 from ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.search import DataSetSearchCriteria
 from ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.fetchoptions import DataSetFetchOptions
+from ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.id import DataSetPermId
+from ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset import DataSetKind
+from ch.systemsx.cisd.openbis.generic.shared.basic.dto import PhysicalDataSet
+from ch.systemsx.cisd.openbis.generic.shared.basic.dto import DataStore
 from ch.systemsx.cisd.openbis.dss.generic.server.ftp import Node
 
 class NodeWithEntityType(Node):
@@ -233,8 +237,8 @@ def listChildren(subPath, acceptor, context):
     permId = node.getPermId()
     if nodeType == "DATASET":
         response = None
-        for permId in node.permIds:
-            dataSetCode, contentNode, content = getContentNode(permId, context)
+        contentNodes = getContentNodes(node.permIds, context)
+        for dataSetCode, contentNode, content in contentNodes:
             if contentNode.isDirectory():
                 if response is None:
                     response = context.createDirectoryResponse()
@@ -352,6 +356,55 @@ def addDataSetFileNodes(path, dataSetCode, contentNode, response, acceptor, cont
                 else:
                     response.addFile(nodeName, childNode)
 
+def addDataSetFileNodesFor(path, dataSets, response, acceptor, context):
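+    # Resolves the content of the given data sets in one batch and adds their
+    # file nodes to the response, replacing the previous per-data-set asContent() calls.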
+    contentNodes = asContentNodes(dataSets, context)
+    for dataSetCode, contentNode, _ in contentNodes:
+        addDataSetFileNodes(path, dataSetCode, contentNode, response, acceptor, context)
+
+def getContentNodes(permIds, context):
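+    # Each perm id may carry an optional path in the form "<dataSetCode>::<path>".
+    # All data sets are fetched with a single V3 API call before their content is resolved.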
+    ids = []
+    paths = []
+    for permId in permIds:
+        splittedId = permId.split("::")
+        ids.append(DataSetPermId(splittedId[0]))
+        paths.append(splittedId[1] if len(splittedId) > 1 else None)
+
+    fetchOptions = DataSetFetchOptions()
+    fetchOptions.withDataStore()
+    fetchOptions.withPhysicalData()
+    dataSets = context.getApi().getDataSets(context.getSessionToken(), ids, fetchOptions).values()
+    return asContentNodes(dataSets, context, paths)
+
+def asContentNodes(dataSets, context, paths=None):
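+    # Builds minimal DataStore/PhysicalDataSet DTOs from the V3 data sets so that the
+    # content provider can resolve their content without modifying the access timestamps.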
+    result = []
+    contentProvider = context.getContentProvider()
+    # Avoid indexing: dataSets may be a java.util.Collection (getDataSets().values()), not a list
+    for i, dataSet in enumerate(dataSets):
+        dataSetCode = dataSet.getCode()
+        dataStore = DataStore()
+        dataStore.setCode(dataSet.getDataStore().getCode())
+        dataStore.setHostUrl(dataSet.getDataStore().getDownloadUrl())
+        kind = dataSet.getKind()
+        if kind == DataSetKind.PHYSICAL:
+            physicalData = dataSet.getPhysicalData()
+            physicalDataSet = PhysicalDataSet()
+            physicalDataSet.setCode(dataSetCode)
+            physicalDataSet.setLocation(physicalData.getLocation())
+            physicalDataSet.setDataStore(dataStore)
+            physicalDataSet.setShareId(physicalData.getShareId())
+            content = contentProvider.asContentWithoutModifyingAccessTimestamp(physicalDataSet)
+            contentNode = content.getRootNode() if paths is None or paths[i] is None else content.tryGetNode(paths[i])
+            result.append((dataSetCode, contentNode, content))
+        else:
+            raise Exception("Not supported data set kind: %s" % kind)
+    return result
+
 def getContentNode(permId, context):
     splittedId = permId.split("::")
     dataSetCode = splittedId[0]
@@ -366,6 +413,8 @@ def getDataSetsOfSampleAndItsChildren(samplePermId, context):
     parentsSearchCriteria = dataSetSearchCriteria.withSample().withParents()
     parentsSearchCriteria.withPermId().thatEquals(samplePermId)
     fetchOptions = DataSetFetchOptions()
+    fetchOptions.withDataStore()
+    fetchOptions.withPhysicalData()
     fetchOptions.withType()
     fetchOptions.withProperties()
     fetchOptions.withSample().withType()