Commit 4eff4a06 authored by cramakri

LMS-1569 Package renaming.

SVN: 16552
parent 61059ba4
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.dataaccess.fvec;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;
import com.csvreader.CsvReader;
import ch.systemsx.cisd.common.exceptions.UserFailureException;
import ch.systemsx.cisd.openbis.dss.etl.featurevector.CSVToCanonicalFeatureVector;
import ch.systemsx.cisd.openbis.dss.etl.featurevector.CanonicalFeatureVector;
import ch.systemsx.cisd.openbis.dss.etl.featurevector.CSVToCanonicalFeatureVector.CSVToCanonicalFeatureVectorConfiguration;
import ch.systemsx.cisd.openbis.dss.generic.server.plugins.tasks.DatasetFileLines;
/**
* @author Chandrasekhar Ramakrishnan
*/
public class CSVToCanonicalFeatureVectorTest extends AssertJUnit
{
@Test
public void testConversion() throws IOException
{
CSVToCanonicalFeatureVectorConfiguration config =
new CSVToCanonicalFeatureVectorConfiguration("WellName", "WellName", true);
CSVToCanonicalFeatureVector convertor =
new CSVToCanonicalFeatureVector(getDatasetFileLines(), config);
ArrayList<CanonicalFeatureVector> fvs = convertor.convert();
// Not all of the columns contain numerical data
assertEquals(16, fvs.size());
}
/**
* Return the tabular data as a DatasetFileLines.
*/
protected DatasetFileLines getDatasetFileLines() throws IOException
{
File file =
new File(
"sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/CP037-1df.csv");
CsvReader reader = getCsvReader(file);
List<String[]> lines = new ArrayList<String[]>();
while (reader.readRecord())
{
lines.add(reader.getValues());
}
reader.close(); // release the underlying file stream once all records have been read
return new DatasetFileLines(file, "test", lines);
}
/**
* Get a CsvReader for parsing a tabular data file.
*/
protected CsvReader getCsvReader(File file) throws IOException
{
if (file.isFile() == false)
{
throw new UserFailureException(file + " does not exist or is not a file.");
}
FileInputStream fileInputStream = new FileInputStream(file);
CsvReader csvReader = new CsvReader(fileInputStream, Charset.defaultCharset());
csvReader.setDelimiter(';');
csvReader.setSkipEmptyRecords(true);
csvReader.setUseComments(true);
csvReader.setComment('#');
return csvReader;
}
}
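For reference, here is a minimal sketch of the CsvReader setup used by getCsvReader() above, run against an in-memory sample instead of the CP037-1df.csv fixture. It is not part of the commit; the sample rows, column names, and the wrapper class with its main method are illustrative assumptions only, while the CsvReader calls (semicolon delimiter, '#' comment lines) mirror the test code.

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import com.csvreader.CsvReader;

public class CsvReaderConfigSketch
{
    public static void main(String[] args) throws IOException
    {
        // Hypothetical sample mimicking the layout the test expects: '#' comment lines, ';' delimiters.
        String sample = "# feature vector export\n"
                + "WellName;feature1;feature2\n"
                + "A01;1.5;2.5\n"
                + "A02;3.0;4.0\n";
        CsvReader reader = new CsvReader(new StringReader(sample));
        reader.setDelimiter(';');
        reader.setSkipEmptyRecords(true);
        reader.setUseComments(true);
        reader.setComment('#');
        while (reader.readRecord())
        {
            // Each record comes back as a String[] of column values.
            System.out.println(Arrays.toString(reader.getValues()));
        }
        reader.close();
    }
}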
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.dataaccess.fvec;
import static org.testng.AssertJUnit.assertEquals;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import ch.systemsx.cisd.base.convert.NativeTaggedArray;
import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.AbstractDBTest;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.DBUtils;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgFeatureDefDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgFeatureValuesDTO;
import ch.systemsx.cisd.openbis.dss.etl.featurevector.CanonicalFeatureVector;
import ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorUploader;
/**
* @author Chandrasekhar Ramakrishnan
*/
public class FeatureVectorUploaderTest extends AbstractDBTest
{
private static final String EXP_PERM_ID = "expFvuId";
private static final String CONTAINER_PERM_ID = "cFvuId";
private static final String DS_PERM_ID = "dsFvuId";
private static final String TEST_FEATURE_NAME = "test";
private IImagingUploadDAO dao;
@BeforeClass(alwaysRun = true)
public void init() throws SQLException
{
dao = DBUtils.getQuery(datasource, IImagingUploadDAO.class);
}
@Test
public void testInit()
{
// tests that parameter bindings in all queries are correct
}
@Test
public void testCreateFeatureValues()
{
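// Identify the experiment, container, and dataset that the uploaded feature vectors belong to.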
ScreeningContainerDatasetInfo info = new ScreeningContainerDatasetInfo();
info.setExperimentPermId(EXP_PERM_ID);
info.setContainerPermId(CONTAINER_PERM_ID);
info.setDatasetPermId(DS_PERM_ID);
FeatureVectorUploader uploader = new FeatureVectorUploader(dao, info);
ArrayList<CanonicalFeatureVector> fvecs = new ArrayList<CanonicalFeatureVector>();
new FeatureVectorProducer(fvecs).produce();
uploader.uploadFeatureVectors(fvecs);
new FeatureVectorVerifier(fvecs.get(0).getFeatureDef().getDataSetId()).verify();
}
// Deliberately non-static: the verifier uses the enclosing test's dao field and assert methods
private class FeatureVectorVerifier
{
private final long datasetId;
// Execution state
private ImgFeatureDefDTO featureDef;
private ImgFeatureValuesDTO featureValues;
private int count;
private FeatureVectorVerifier(long datasetId)
{
this.datasetId = datasetId;
}
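// Reads the uploaded data back through the DAO and checks both feature definitions and their values.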
private void verify()
{
List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetId(datasetId);
assertEquals(2, featureDefs.size());
count = 0;
featureDef = featureDefs.get(count);
verifyFeatureDef();
featureDef = featureDefs.get(++count);
verifyFeatureDef();
}
private void verifyFeatureDef()
{
assertEquals(TEST_FEATURE_NAME + count, featureDef.getName());
List<ImgFeatureValuesDTO> featureValuesList = dao.getFeatureValues(featureDef);
assertEquals(1, featureValuesList.size());
featureValues = featureValuesList.get(0);
verifyFeatureValues();
}
private void verifyFeatureValues()
{
assertEquals(0.0, featureValues.getT());
assertEquals(0.0, featureValues.getZ());
MDDoubleArray spreadsheet =
NativeTaggedArray.tryToDoubleArray(featureValues.getValues());
int[] dims =
{ 3, 5 };
int[] spreadsheetDims = spreadsheet.dimensions();
assertEquals(spreadsheetDims.length, dims.length);
assertEquals(spreadsheetDims[0], dims[0]);
assertEquals(spreadsheetDims[1], dims[1]);
for (int i = 0; i < dims[0]; ++i)
{
for (int j = 0; j < dims[1]; ++j)
{
assertEquals((double) (i + j), spreadsheet.get(i, j));
}
}
}
}
private static class FeatureVectorProducer
{
private final ArrayList<CanonicalFeatureVector> fvecs;
private FeatureVectorProducer(ArrayList<CanonicalFeatureVector> fvecs)
{
this.fvecs = fvecs;
}
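// Creates two 3x5 feature vectors, named test0 and test1, matching what the verifier expects.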
private void produce()
{
fvecs.add(createFeatureVector(0, 3, 5));
fvecs.add(createFeatureVector(1, 3, 5));
}
private CanonicalFeatureVector createFeatureVector(int i, int rowCount, int columnCount)
{
CanonicalFeatureVector fvec = new CanonicalFeatureVector();
String featureName = TEST_FEATURE_NAME + i;
String featureDesc = featureName + " desc";
ImgFeatureDefDTO featureDef = new ImgFeatureDefDTO(featureName, featureDesc, 0);
fvec.setFeatureDef(featureDef);
byte[] values = createValues(rowCount, columnCount);
ImgFeatureValuesDTO featureValues = new ImgFeatureValuesDTO(0.0, 0.0, values, 0);
fvec.setValues(Collections.singletonList(featureValues));
return fvec;
}
private byte[] createValues(int rowCount, int columnCount)
{
int[] dims =
{ rowCount, columnCount };
MDDoubleArray array = new MDDoubleArray(dims);
for (int i = 0; i < dims[0]; ++i)
{
for (int j = 0; j < dims[1]; ++j)
{
array.set(i + j, i, j);
}
}
return NativeTaggedArray.toByteArray(array);
}
}
}
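Similarly, a minimal sketch (not part of the commit) of the NativeTaggedArray round trip that FeatureVectorProducer and FeatureVectorVerifier exercise above. It uses only classes and methods already referenced in the test (MDDoubleArray, NativeTaggedArray.toByteArray, NativeTaggedArray.tryToDoubleArray); the wrapper class and its main method are illustrative.

import java.util.Arrays;
import ch.systemsx.cisd.base.convert.NativeTaggedArray;
import ch.systemsx.cisd.base.mdarray.MDDoubleArray;

public class NativeTaggedArrayRoundTripSketch
{
    public static void main(String[] args)
    {
        // Build a 3x5 array holding i + j at position (i, j), as in FeatureVectorProducer.createValues().
        MDDoubleArray array = new MDDoubleArray(new int[] { 3, 5 });
        for (int i = 0; i < 3; ++i)
        {
            for (int j = 0; j < 5; ++j)
            {
                array.set(i + j, i, j);
            }
        }
        // Serialize to the byte[] form stored in ImgFeatureValuesDTO, then decode it again.
        byte[] values = NativeTaggedArray.toByteArray(array);
        MDDoubleArray decoded = NativeTaggedArray.tryToDoubleArray(values);
        // Expected output: [3, 5] and 6.0 (the value stored at row 2, column 4).
        System.out.println(Arrays.toString(decoded.dimensions()));
        System.out.println(decoded.get(2, 4));
    }
}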