diff --git a/datastore_server/source/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandler.java b/datastore_server/source/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandler.java
index 71b729c4ffb986c0044d46d4a1c68a821d127f6c..79479a8e3c21e251d80667208b04327768bab568 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandler.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandler.java
@@ -87,6 +87,8 @@ class PutDataSetTopLevelDataSetHandler
     private final File temporaryIncomingDir;
 
     private final File dataSetDir;
+
+    private final File dataSet;
 
     PutDataSetTopLevelDataSetHandler(PutDataSetService service,
             ITopLevelDataSetRegistrator registrator, String sessionToken, NewDataSetDTO newDataSet,
@@ -98,7 +100,23 @@ class PutDataSetTopLevelDataSetHandler
         this.newDataSet = newDataSet;
         this.inputStream = inputStream;
         this.temporaryIncomingDir = service.createTemporaryIncomingDir();
-        this.dataSetDir = new File(temporaryIncomingDir, newDataSet.getDataSetFolderName());
+        String dataSetFolderName = newDataSet.getDataSetFolderName();
+        // TODO, 2011-04-27, FJE: It would be better to allow null value for
+        // newDataSet.getDataSetFolderName()
+        // in order to distinguish a file data set from a folder data set. But this is a change
+        // in API which is small but non-backward compatible.
+        boolean dataSetIsASingleFile =
+                NewDataSetDTO.DEFAULT_DATA_SET_FOLDER_NAME.equals(dataSetFolderName)
+                        && newDataSet.getFileInfos().size() == 1;
+        if (dataSetIsASingleFile)
+        {
+            dataSetDir = temporaryIncomingDir;
+            dataSet = new File(temporaryIncomingDir, newDataSet.getFileInfos().get(0).getPathInDataSet());
+        } else
+        {
+            this.dataSetDir = new File(temporaryIncomingDir, dataSetFolderName);
+            dataSet = dataSetDir;
+        }
         if (dataSetDir.exists())
         {
             deleteDataSetDir();
@@ -106,7 +124,7 @@ class PutDataSetTopLevelDataSetHandler
         if (false == this.dataSetDir.mkdir())
         {
             throw new EnvironmentFailureException("Could not create directory for data set "
-                    + newDataSet.getDataSetFolderName());
+                    + dataSet.getName());
         }
     }
 
@@ -130,7 +148,7 @@ class PutDataSetTopLevelDataSetHandler
         try
        {
             DataSetRegistratorDelegate delegate = new DataSetRegistratorDelegate();
-            registrator.handle(dataSetDir, getCallerDataSetInformation(), delegate);
+            registrator.handle(dataSet, getCallerDataSetInformation(), delegate);
             return delegate.registeredDataSets;
         } finally
         {
@@ -153,8 +171,7 @@ class PutDataSetTopLevelDataSetHandler
 
     public DataSetOwner getDataSetOwner()
     {
-        DataSetOwner owner = newDataSet.getDataSetOwner();
-        return owner;
+        return newDataSet.getDataSetOwner();
     }
 
     public DataSetInformation getCallerDataSetInformation()
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/NewDataSetDTO.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/NewDataSetDTO.java
index dce8c7550840739fbffdcd1d1247bbe9e4ca1972..18964d505cedd6384a6060d38cf155ddbefd560e 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/NewDataSetDTO.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/NewDataSetDTO.java
@@ -37,7 +37,7 @@ import org.apache.commons.lang.builder.ToStringStyle;
  */
 public class NewDataSetDTO implements Serializable
 {
-    private static String DEFAULT_DATA_SET_FOLDER_NAME = "original";
+    public static String DEFAULT_DATA_SET_FOLDER_NAME = "original";
 
     /**
      * The different types of owners of data sets; there are two: experiment and sample.
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandlerTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandlerTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..b6a43f8e1e7c615e5fa0da7312e0eeb43ac3c3f9
--- /dev/null
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/api/v1/PutDataSetTopLevelDataSetHandlerTest.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2011 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.etlserver.api.v1;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.io.ByteArrayBasedContent;
+import ch.systemsx.cisd.common.io.ConcatenatedContentInputStream;
+import ch.systemsx.cisd.common.io.IContent;
+import ch.systemsx.cisd.common.logging.BufferedAppender;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.logging.LogInitializer;
+import ch.systemsx.cisd.common.test.RecordingMatcher;
+import ch.systemsx.cisd.etlserver.ITopLevelDataSetRegistrator;
+import ch.systemsx.cisd.etlserver.ITopLevelDataSetRegistratorDelegate;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.v1.FileInfoDssDTO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.v1.NewDataSetDTO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.v1.NewDataSetDTO.DataSetOwner;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.v1.NewDataSetDTO.DataSetOwnerType;
+import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DatabaseInstance;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewProperty;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier;
+
+/**
+ *
+ *
+ * @author Franz-Josef Elmer
+ */
+public class PutDataSetTopLevelDataSetHandlerTest extends AbstractFileSystemTestCase
+{
+    private static final String DATABASE_INSTANCE_CODE = "DB";
+    private static final String SESSION_TOKEN = "session-1";
+    private static final String DATA_SET_CODE = "ds-1";
+    private static final Logger logger = LogFactory.getLogger(LogCategory.OPERATION,
+            PutDataSetTopLevelDataSetHandlerTest.class);
+
+    private BufferedAppender logRecorder;
+    private Mockery context;
+    private IEncapsulatedOpenBISService service;
+    private PutDataSetService putDataSetService;
+
+    private ITopLevelDataSetRegistrator registrator;
+    private File incomingDir;
+    private File storeDir;
+
+    @BeforeMethod
+    public void beforeMethod()
+    {
+        LogInitializer.init();
+        logRecorder = new BufferedAppender("%-5p %c - %m%n", Level.DEBUG);
+        context = new Mockery();
+        service = context.mock(IEncapsulatedOpenBISService.class);
+        registrator = context.mock(ITopLevelDataSetRegistrator.class);
+        final DatabaseInstance databaseInstance = new DatabaseInstance();
+        databaseInstance.setCode(DATABASE_INSTANCE_CODE);
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).getHomeDatabaseInstance();
+                    will(returnValue(databaseInstance));
+                }
+            });
+        storeDir = new File(workingDirectory, "store/");
+        storeDir.mkdirs();
+        incomingDir = new File(workingDirectory, "incoming/");
+        incomingDir.mkdirs();
+        putDataSetService =
+                new PutDataSetService(service, logger, storeDir, incomingDir,
+                        new TestDataSetTypeToTopLevelRegistratorMapper(registrator), null,
+                        DATA_SET_CODE, null);
+    }
+
+    @AfterMethod
+    public void afterMethod(Method method)
+    {
+        assertEquals(0, incomingDir.listFiles().length);
+        logRecorder.reset();
+        try
+        {
+            context.assertIsSatisfied();
+        } catch (Throwable t)
+        {
+            // assert expectations were met, including the name of the failed method
+            throw new Error(method.getName() + "() : ", t);
+        }
+    }
+
+    @Test
+    public void testDataSetFile() throws IOException
+    {
+        DataSetOwner dataSetOwner = new DataSetOwner(DataSetOwnerType.EXPERIMENT, "/S/P/E1");
+        final ExperimentIdentifier experimentIdentifier =
+                ExperimentIdentifierFactory.parse(dataSetOwner.getIdentifier());
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).createDataSetCode();
+                    will(returnValue(DATA_SET_CODE));
+
+                    one(service).checkSpaceAccess(
+                            SESSION_TOKEN,
+                            new SpaceIdentifier(experimentIdentifier.getDatabaseInstanceCode(),
+                                    experimentIdentifier.getSpaceCode()));
+                }
+            });
+        RecordingMatcher<DataSetInformation> dataSetInfoMatcher =
+                new RecordingMatcher<DataSetInformation>();
+        FileInfoDssDTO f1 = new FileInfoDssDTO("hello.txt", "hello", false, 12);
+        File file1 = new File(incomingDir, DATA_SET_CODE + "/" + f1.getPathInDataSet());
+        List<IContent> contents =
+                prepareRegistrator(file1, Arrays.asList(file1), Arrays.asList("hello world"),
+                        dataSetInfoMatcher);
+        NewDataSetDTO newDataSet = new NewDataSetDTO(dataSetOwner, null, Arrays.asList(f1));
+        newDataSet.setDataSetTypeOrNull("MY-TYPE");
+        HashMap<String, String> props = new HashMap<String, String>();
+        props.put("name", "Albert");
+        newDataSet.setProperties(props);
+        ConcatenatedContentInputStream inputStream =
+                new ConcatenatedContentInputStream(true, contents);
+        PutDataSetTopLevelDataSetHandler handler =
+                new PutDataSetTopLevelDataSetHandler(putDataSetService, registrator, SESSION_TOKEN,
+                        newDataSet, inputStream);
+
+        handler.execute();
+
+        assertEquals("MY-TYPE", dataSetInfoMatcher.recordedObject().getDataSetType().getCode());
+        List<NewProperty> dataSetProperties =
+                dataSetInfoMatcher.recordedObject().getDataSetProperties();
+        assertEquals("name", dataSetProperties.get(0).getPropertyCode());
+        assertEquals("Albert", dataSetProperties.get(0).getValue());
+        assertEquals(1, dataSetProperties.size());
+        assertEquals(experimentIdentifier, dataSetInfoMatcher.recordedObject()
+                .getExperimentIdentifier());
+        assertEquals("", logRecorder.getLogContent());
+    }
+
+    @Test
+    public void testDataSetFolder() throws IOException
+    {
+        DataSetOwner dataSetOwner = new DataSetOwner(DataSetOwnerType.SAMPLE, "/S/S1");
+        final SampleIdentifier sampleIdentifier =
+                SampleIdentifierFactory.parse(dataSetOwner.getIdentifier());
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).createDataSetCode();
+                    will(returnValue(DATA_SET_CODE));
+
+                    one(service).checkSpaceAccess(SESSION_TOKEN, sampleIdentifier.getSpaceLevel());
+                }
+            });
+        RecordingMatcher<DataSetInformation> dataSetInfoMatcher =
+                new RecordingMatcher<DataSetInformation>();
+        FileInfoDssDTO f1 = new FileInfoDssDTO("hello.txt", "hello", false, 12);
+        File dataSet = new File(incomingDir, DATA_SET_CODE + "/ds-folder");
+        File file1 = new File(dataSet, f1.getPathInDataSet());
+        FileInfoDssDTO f2 = new FileInfoDssDTO("subdir/hi.txt", "hi", false, 10);
+        File file2 = new File(dataSet, f2.getPathInDataSet());
+        FileInfoDssDTO d1 = new FileInfoDssDTO("subdir", "subdir", true, 12);
+        List<IContent> contents =
+                prepareRegistrator(dataSet, Arrays.asList(file1, file2),
+                        Arrays.asList("hello world", "hi universe"), dataSetInfoMatcher);
+        NewDataSetDTO newDataSet =
+                new NewDataSetDTO(dataSetOwner, "ds-folder", Arrays.asList(f1, d1, f2));
+        ConcatenatedContentInputStream inputStream =
+                new ConcatenatedContentInputStream(true, contents);
+        PutDataSetTopLevelDataSetHandler handler =
+                new PutDataSetTopLevelDataSetHandler(putDataSetService, registrator, SESSION_TOKEN,
+                        newDataSet, inputStream);
+
+        handler.execute();
+
+        assertEquals(null, dataSetInfoMatcher.recordedObject().getDataSetType());
+        assertEquals(0, dataSetInfoMatcher.recordedObject().getDataSetProperties().size());
+        assertEquals(null, dataSetInfoMatcher.recordedObject().getExperimentIdentifier());
+        assertEquals(null, dataSetInfoMatcher.recordedObject().getInstanceCode());
+        assertEquals("S", dataSetInfoMatcher.recordedObject().getSpaceCode());
+        assertEquals("S1", dataSetInfoMatcher.recordedObject().getSampleCode());
+        assertEquals("", logRecorder.getLogContent());
+    }
+
+    private List<IContent> prepareRegistrator(final File dataSet, final List<File> files,
+            final List<String> contents,
+            final RecordingMatcher<DataSetInformation> dataSetInfoMatcher)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(registrator).handle(with(dataSet), with(dataSetInfoMatcher),
+                            with(new BaseMatcher<ITopLevelDataSetRegistratorDelegate>()
+                                {
+                                    public boolean matches(Object item)
+                                    {
+                                        // We can check file content only here because after
+                                        // invocation of handle() all files are deleted.
+                                        for (int i = 0; i < files.size(); i++)
+                                        {
+                                            File file = files.get(i);
+                                            assertEquals("Content of " + file, contents.get(i),
+                                                    FileUtilities.loadToString(file).trim());
+                                        }
+                                        return true;
+                                    }
+
+                                    public void describeTo(Description description)
+                                    {
+                                    }
+                                }));
+                }
+            });
+
+        List<IContent> result = new ArrayList<IContent>();
+        for (int i = 0; i < files.size(); i++)
+        {
+            File file = files.get(i);
+            result.add(new ByteArrayBasedContent(contents.get(i).getBytes(), file.getName()));
+        }
+        return result;
+    }
+}