Skip to content
Snippets Groups Projects
Commit a59485c9 authored by tpylak's avatar tpylak
Browse files

LMS-1645 dynamix: remove BDS migration code (easier maintenance)

SVN: 17461
parent 1af95e6a
No related branches found
No related tags found
No related merge requests found
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import java.io.File;
import ch.systemsx.cisd.etlserver.plugins.IMigrator;
/**
 * Base class for migrators operating on datasets stored in the BDS format. Datasets which are not
 * recognized as BDS are left untouched and reported as successfully migrated.
 *
 * @author Franz-Josef Elmer
 */
abstract class AbstractBDSMigrator implements IMigrator
{
    public boolean migrate(File dataset)
    {
        if (isBDS(dataset) == false)
        {
            // Not a BDS dataset - nothing to do, treat as success.
            return true;
        }
        return doMigration(dataset);
    }

    /** Performs the actual migration of a BDS dataset. Returns <code>true</code> on success. */
    protected abstract boolean doMigration(File dataset);

    /** A dataset counts as BDS if it contains all four standard BDS top-level directories. */
    private static boolean isBDS(File dataset)
    {
        File[] entries = dataset.listFiles();
        String[] requiredDirs =
            { BDSMigrationUtils.VERSION_DIR, BDSMigrationUtils.DATA_DIR,
                    BDSMigrationUtils.METADATA_DIR, BDSMigrationUtils.ANNOTATIONS_DIR };
        for (String requiredDir : requiredDirs)
        {
            if (containsDir(entries, requiredDir) == false)
            {
                return false;
            }
        }
        return true;
    }

    private static boolean containsDir(File[] entries, String dirName)
    {
        if (entries == null)
        {
            // listFiles() returns null when the dataset path is not a readable directory.
            return false;
        }
        for (File entry : entries)
        {
            if (dirName.equalsIgnoreCase(entry.getName()))
            {
                return true;
            }
        }
        return false;
    }

    public void close()
    {
        // do nothing
    }
}
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import java.io.File;
import java.util.Properties;
import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
import ch.systemsx.cisd.common.filesystem.FileUtilities;
/**
 * Removes unnecessary BDS data as a part of the migration.
 *
 * @author Tomasz Pylak
 */
public class BDSDataRemoverMigrator extends AbstractBDSMigrator
{
    public BDSDataRemoverMigrator()
    {
    }

    // Every IMigrator needs the following constructor.
    public BDSDataRemoverMigrator(Properties properties)
    {
    }

    public String getDescription()
    {
        return "removing unnecessary BDS data";
    }

    @Override
    protected boolean doMigration(File dataset)
    {
        // Refuse to delete anything while the original data still sit inside the BDS structure.
        if (BDSMigrationUtils.tryGetOriginalDir(dataset) != null)
        {
            BDSMigrationUtils.logError(dataset, "original data has not been moved");
            return false;
        }
        String[] obsoleteDirs =
            { BDSMigrationUtils.METADATA_DIR, BDSMigrationUtils.VERSION_DIR,
                    BDSMigrationUtils.ANNOTATIONS_DIR, BDSMigrationUtils.DATA_DIR };
        try
        {
            for (String obsoleteDir : obsoleteDirs)
            {
                removeDir(dataset, obsoleteDir);
            }
        } catch (EnvironmentFailureException ex)
        {
            // Failure details were already logged by removeDir().
            return false;
        }
        return true;
    }

    /** Deletes the given directory recursively; throws if the deletion did not fully succeed. */
    private void removeDir(File dataset, String relativeDirPath) throws EnvironmentFailureException
    {
        File dir = new File(dataset, relativeDirPath);
        if (FileUtilities.deleteRecursively(dir) == false)
        {
            String errorMsg = "Cannot delete the directory: " + dir.getAbsolutePath();
            BDSMigrationUtils.operationLog.error(errorMsg);
            throw new EnvironmentFailureException(errorMsg);
        }
    }
}
\ No newline at end of file
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import javax.sql.DataSource;

import net.lemnik.eodsql.QueryTool;

import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
import ch.systemsx.cisd.common.utilities.PropertyUtils;
import ch.systemsx.cisd.openbis.dss.etl.AbstractHCSImageFileExtractor;
import ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor;
import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ColorComponent;
import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.IImagingQueryDAO;
/**
 * Migration step which uploads BDS dataset images to the imaging database.
 *
 * @author Franz-Josef Elmer
 */
public class BDSImagingDatabaseMigrator extends AbstractBDSMigrator
{
    private final List<String> channelNames;

    private final List<ColorComponent> channelColorComponentsOrNull;

    private final DataSource dataSource;

    /**
     * Returns the name of the single entry in the dataset's <code>data/original</code> directory,
     * or <code>null</code> (with an error logged) if the directory is missing or does not contain
     * exactly one entry.
     */
    private static String tryGetOriginalDatasetDirName(File dataset)
    {
        // Delegate to the shared helper instead of duplicating the data/original lookup.
        File originalDir = BDSMigrationUtils.tryGetOriginalDir(dataset);
        if (originalDir == null)
        {
            BDSMigrationUtils.logError(dataset, "Original directory does not exist.");
            return null;
        }
        File[] files = originalDir.listFiles();
        // listFiles() can return null on an I/O error. Arrays.toString() renders the directory
        // content readably - the previous code concatenated the array reference itself.
        if (files == null || files.length != 1)
        {
            int count = files == null ? 0 : files.length;
            BDSMigrationUtils.logError(dataset, "Original directory '" + originalDir
                    + "' should contain exactly one file, but contains " + count + ": "
                    + Arrays.toString(files));
            return null;
        }
        return files[0].getName();
    }

    public BDSImagingDatabaseMigrator(Properties properties)
    {
        this.dataSource = ServiceProvider.getDataSourceProvider().getDataSource(properties);
        this.channelNames =
                PropertyUtils.getMandatoryList(properties, PlateStorageProcessor.CHANNEL_NAMES);
        this.channelColorComponentsOrNull =
                AbstractHCSImageFileExtractor.tryGetChannelComponents(properties);
        // Color components, when configured, must map 1:1 onto the configured channel names.
        if (channelColorComponentsOrNull != null
                && channelColorComponentsOrNull.size() != channelNames.size())
        {
            throw new ConfigurationFailureException(
                    "There should be exactly one color component for each channel name."
                            + " Correct the list of values for the components property.");
        }
    }

    public String getDescription()
    {
        return "uploading data to the imaging database";
    }

    @Override
    protected boolean doMigration(File dataset)
    {
        String originalDatasetDirName = tryGetOriginalDatasetDirName(dataset);
        if (originalDatasetDirName == null)
        {
            return false;
        }
        IImagingQueryDAO dao = createQuery();
        boolean ok =
                new BDSImagingDbUploader(dataset, dao, originalDatasetDirName, channelNames,
                        channelColorComponentsOrNull).migrate();
        // The uploader closes the DAO itself; drop our reference.
        dao = null;
        return ok;
    }

    private IImagingQueryDAO createQuery()
    {
        return QueryTool.getQuery(dataSource, IImagingQueryDAO.class);
    }

    @Override
    public void close()
    {
        // do nothing
    }
}
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import static ch.systemsx.cisd.openbis.dss.etl.bdsmigration.BDSMigrationUtils.DIR_SEP;
import static ch.systemsx.cisd.openbis.dss.etl.bdsmigration.BDSMigrationUtils.METADATA_DIR;
import static ch.systemsx.cisd.openbis.dss.etl.bdsmigration.BDSMigrationUtils.ORIGINAL_DIR;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import ch.systemsx.cisd.bds.hcs.Location;
import ch.systemsx.cisd.openbis.dss.etl.AcquiredPlateImage;
import ch.systemsx.cisd.openbis.dss.etl.HCSDatasetUploader;
import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult;
import ch.systemsx.cisd.openbis.dss.etl.RelativeImageReference;
import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ColorComponent;
import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.IImagingQueryDAO;
/**
* Uploads data to the imaging database.
*
* @author Tomasz Pylak
*/
class BDSImagingDbUploader
{
private final File dataset;
private final String originalDatasetDirName;
private final List<String> channelNames;
private final List<ColorComponent> channelColorComponentsOrNull;
private final IImagingQueryDAO dao;
public BDSImagingDbUploader(File dataset, IImagingQueryDAO dao, String originalDatasetDirName,
List<String> channelNames, List<ColorComponent> channelColorComponentsOrNull)
{
this.dao = dao;
this.dataset = dataset;
this.originalDatasetDirName = originalDatasetDirName;
this.channelNames = channelNames;
this.channelColorComponentsOrNull = channelColorComponentsOrNull;
}
public boolean migrate()
{
List<AcquiredPlateImage> images = tryExtractMappings();
if (images == null)
{
return false;
}
String relativeImagesDirectory = getRelativeImagesDirectory();
for (AcquiredPlateImage acquiredPlateImage : images)
{
acquiredPlateImage.getImageReference().setRelativeImageFolder(relativeImagesDirectory);
}
ScreeningContainerDatasetInfo info = ScreeningDatasetInfoExtractor.tryCreateInfo(dataset);
if (info == null)
{
return false;
}
Set<HCSImageFileExtractionResult.Channel> channels = extractChannels();
return storeInDatabase(images, info, channels);
}
private Set<Channel> extractChannels()
{
Set<Channel> channels = new HashSet<Channel>();
for (String channelName : channelNames)
{
channels.add(new Channel(channelName, null, null));
}
return channels;
}
private boolean storeInDatabase(List<AcquiredPlateImage> images,
ScreeningContainerDatasetInfo info, Set<HCSImageFileExtractionResult.Channel> channels)
{
try
{
HCSDatasetUploader.upload(dao, info, images, channels);
dao.commit();
return true;
} catch (Exception ex)
{
ex.printStackTrace();
logError("Uploading to the imaging db failed: " + ex.getMessage());
dao.rollback();
return false;
} finally
{
dao.close();
}
}
private String getRelativeImagesDirectory()
{
return ORIGINAL_DIR + DIR_SEP + originalDatasetDirName;
}
private List<AcquiredPlateImage> tryExtractMappings()
{
File mappingFile = new File(dataset, METADATA_DIR + DIR_SEP + "standard_original_mapping");
if (mappingFile.isFile() == false)
{
logError("File '" + mappingFile + "' does not exist.");
return null;
}
try
{
List<String> lines = readLines(mappingFile);
return tryParseMappings(lines);
} catch (IOException ex)
{
logError("Error when reading mapping file '" + mappingFile + "': " + ex.getMessage());
return null;
}
}
@SuppressWarnings("unchecked")
private static List<String> readLines(File mappingFile) throws IOException,
FileNotFoundException
{
FileInputStream stream = new FileInputStream(mappingFile);
try
{
return IOUtils.readLines(stream);
} finally
{
stream.close();
}
}
private List<AcquiredPlateImage> tryParseMappings(List<String> lines)
{
List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
for (String line : lines)
{
List<AcquiredPlateImage> mapping = tryParseMapping(line);
if (mapping != null)
{
images.addAll(mapping);
} else
{
return null;
}
}
return images;
}
private List<AcquiredPlateImage> tryParseMapping(String line)
{
String[] tokens = StringUtils.split(line);
if (tokens.length != 3)
{
logError("Wrong number of tokens in the mapping line: " + line);
return null;
} else
{
try
{
return tryParseMappingLine(tokens[0], tokens[2]);
} catch (NumberFormatException ex)
{
logError("Incorrect format of mapping line: " + line + ". Cannot parse a number: "
+ ex.getMessage());
return null;
}
}
}
// Example of standardPath: channel2/row1/column4/row2_column2.tiff
private List<AcquiredPlateImage> tryParseMappingLine(String standardPath, String originalPath)
throws NumberFormatException
{
String[] pathTokens = standardPath.split("/");
if (pathTokens.length != 4)
{
logError("Wrong number of tokens in standard path: " + standardPath);
return null;
}
int channelNum = asNum(pathTokens[0], "channel");
int row = asNum(pathTokens[1], "row");
int col = asNum(pathTokens[2], "column");
Location wellLocation = new Location(col, row);
String[] tileTokens = tryParseTileToken(pathTokens[3]);
if (tileTokens == null)
{
return null;
}
int tileRow = asNum(tileTokens[0], "row");
int tileCol = asNum(tileTokens[1], "column");
Location tileLocation = new Location(tileCol, tileRow);
String relativeImagePath = tryGetRelativeImagePath(originalPath);
if (relativeImagePath == null)
{
return null;
}
String channelName = tryGetChannelName(channelNum, standardPath);
if (channelName == null)
{
return null;
}
return createImages(wellLocation, tileLocation, relativeImagePath, channelName);
}
private static int asNum(String standardPathToken, String prefix) throws NumberFormatException
{
String number = standardPathToken.substring(prefix.length());
return Integer.parseInt(number);
}
private List<AcquiredPlateImage> createImages(Location wellLocation, Location tileLocation,
String relativeImagePath, String channelName)
{
List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
if (channelColorComponentsOrNull != null)
{
for (int i = 0; i < channelColorComponentsOrNull.size(); i++)
{
ColorComponent colorComponent = channelColorComponentsOrNull.get(i);
String channel = channelNames.get(i);
images.add(createImage(wellLocation, tileLocation, relativeImagePath, channel,
colorComponent));
}
} else
{
images
.add(createImage(wellLocation, tileLocation, relativeImagePath, channelName,
null));
}
return images;
}
private static AcquiredPlateImage createImage(Location plateLocation, Location wellLocation,
String imageRelativePath, String channelName, ColorComponent colorComponent)
{
return new AcquiredPlateImage(plateLocation, wellLocation, channelName, null, null,
new RelativeImageReference(imageRelativePath, null, colorComponent));
}
// channelId - starts with 1
private String tryGetChannelName(int channelId, String standardPath)
{
if (channelNames.size() < channelId)
{
logError("Name of the channel with the id " + channelId
+ " has not been configured but is referenced in the path: " + standardPath
+ ".");
return null;
}
return channelNames.get(channelId - 1);
}
private String tryGetRelativeImagePath(String originalPath)
{
String prefixPath = originalDatasetDirName + "/";
if (originalPath.startsWith(prefixPath) == false)
{
logError("Original path " + originalPath + " should start with " + prefixPath);
return null;
}
return originalPath.substring(prefixPath.length());
}
// tileFile - e.g. row2_column2.tiff
private String[] tryParseTileToken(String tileFile)
{
String tileDesc;
int dotIndex = tileFile.indexOf(".");
if (dotIndex != -1)
{
tileDesc = tileFile.substring(0, dotIndex);
} else
{
tileDesc = tileFile;
}
String[] tileTokens = tileDesc.split("_");
if (tileTokens.length != 2)
{
logError("Wrong number of tokens in tile file name: " + tileDesc);
return null;
}
return tileTokens;
}
private void logError(String reason)
{
BDSMigrationUtils.logError(dataset, reason);
}
}
\ No newline at end of file
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import java.io.File;
import org.apache.log4j.Logger;
import ch.systemsx.cisd.common.logging.LogCategory;
import ch.systemsx.cisd.common.logging.LogFactory;
import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ScreeningConstants;
/**
 * Constants and utility methods useful to migrate BDS datasets.
 *
 * @author Tomasz Pylak
 */
public class BDSMigrationUtils
{
    static final Logger operationLog =
            LogFactory.getLogger(LogCategory.OPERATION, BDSMigrationUtils.class);

    static final String ANNOTATIONS_DIR = "annotations";

    static final String METADATA_DIR = "metadata";

    static final String DATA_DIR = "data";

    static final String VERSION_DIR = "version";

    static final String ORIGINAL_DIR = ScreeningConstants.ORIGINAL_DATA_DIR;

    static final String DIR_SEP = "/";

    private BDSMigrationUtils()
    {
        // static utility class - not to be instantiated
    }

    /**
     * Returns the <code>data/original</code> directory of the dataset, or <code>null</code> if it
     * does not exist or is not a directory.
     */
    static File tryGetOriginalDir(File dataset)
    {
        File orgDir = new File(dataset, DATA_DIR + DIR_SEP + ORIGINAL_DIR);
        if (orgDir.isDirectory() == false)
        {
            return null;
        }
        return orgDir;
    }

    /** Logs a migration failure for the given dataset together with the reason. */
    static void logError(File dataset, String reason)
    {
        operationLog.error("Cannot migrate dataset '" + dataset.getName() + "'. " + reason);
    }
}
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import java.io.File;
import java.util.Properties;
/**
* Second step of BDS migration, moves data from data/original to original.
*
* @author Tomasz Pylak
*/
public class BDSOriginalDataRelocatorMigrator extends AbstractBDSMigrator
{
BDSOriginalDataRelocatorMigrator()
{
}
// Every IMigrator needs the following constructor.
public BDSOriginalDataRelocatorMigrator(Properties properties)
{
}
public String getDescription()
{
return "moving data from data/original to original/";
}
@Override
protected boolean doMigration(File dataset)
{
File originalDir = BDSMigrationUtils.tryGetOriginalDir(dataset);
if (originalDir == null)
{
BDSMigrationUtils.operationLog.warn("No original data directory in dataset " + dataset);
return false;
}
File destinationDir = new File(dataset, BDSMigrationUtils.ORIGINAL_DIR);
boolean ok = originalDir.renameTo(destinationDir);
if (ok == false)
{
BDSMigrationUtils.operationLog.error("Cannot move " + originalDir + " to "
+ destinationDir);
return false;
} else
{
return true;
}
}
}
\ No newline at end of file
/*
* Copyright 2010 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.etl.bdsmigration;
import static ch.systemsx.cisd.openbis.dss.etl.bdsmigration.BDSMigrationUtils.DIR_SEP;
import static ch.systemsx.cisd.openbis.dss.etl.bdsmigration.BDSMigrationUtils.METADATA_DIR;
import java.io.File;
import ch.systemsx.cisd.common.filesystem.FileOperations;
import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier;
/**
* Extract the dataset metadata from BDS and by asking openBIS. Used to migrate BDS to imaging db.
*
* @author Tomasz Pylak
*/
class ScreeningDatasetInfoExtractor
{
public static ScreeningContainerDatasetInfo tryCreateInfo(File dataset)
{
Sample sample = tryGetSampleWithExperiment(dataset);
if (sample == null)
{
return null;
}
try
{
return createInfo(dataset, sample);
} catch (Exception ex)
{
ex.printStackTrace();
BDSMigrationUtils.logError(dataset, "Unexpected exception: " + ex.getMessage());
return null;
}
}
private static Sample tryGetSampleWithExperiment(File dataset)
{
IEncapsulatedOpenBISService openBISService = ServiceProvider.getOpenBISService();
SampleIdentifier sampleIdentifier = createSampleIdentifier(dataset);
Sample sample = openBISService.tryGetSampleWithExperiment(sampleIdentifier);
if (sample == null)
{
BDSMigrationUtils.logError(dataset, "Sample '" + sampleIdentifier
+ "' cannot be found in openBIS");
}
return sample;
}
private static SampleIdentifier createSampleIdentifier(File dataset)
{
File sampleDir = new File(dataset, METADATA_DIR + DIR_SEP + "sample");
String databaseInstanceCode = contentAsString(new File(sampleDir, "instance_code"));
String spaceCode = contentAsString(new File(sampleDir, "space_code"));
String sampleCode = contentAsString(new File(sampleDir, "code"));
SpaceIdentifier spaceIdentifier = new SpaceIdentifier(databaseInstanceCode, spaceCode);
return new SampleIdentifier(spaceIdentifier, sampleCode);
}
private static ScreeningContainerDatasetInfo createInfo(File dataset, Sample sample)
{
int rows = extractGeometryDim(dataset, "plate_geometry", "rows");
int columns = extractGeometryDim(dataset, "plate_geometry", "columns");
int tileRows = extractGeometryDim(dataset, "well_geometry", "rows");
int tileColumns = extractGeometryDim(dataset, "well_geometry", "columns");
ScreeningContainerDatasetInfo info = new ScreeningContainerDatasetInfo();
info.setContainerRows(rows);
info.setContainerColumns(columns);
info.setTileRows(tileRows);
info.setTileColumns(tileColumns);
info.setDatasetPermId(extractDatasetPermId(dataset));
info.setContainerPermId(sample.getPermId());
info.setExperimentPermId(sample.getExperiment().getPermId());
return info;
}
private static int extractGeometryDim(File dataset, String geometryName, String fieldName)
{
File parentDir =
new File(dataset, METADATA_DIR + DIR_SEP + "parameters" + DIR_SEP + geometryName);
return contentAsNumber(new File(parentDir, fieldName));
}
private static String extractDatasetPermId(File dataset)
{
File file =
new File(dataset, BDSMigrationUtils.METADATA_DIR + DIR_SEP + "data_set" + DIR_SEP
+ "code");
return contentAsString(file);
}
private static int contentAsNumber(File file)
{
return Integer.parseInt(contentAsString(file));
}
private static String contentAsString(File file)
{
return FileOperations.getInstance().getContentAsString(file).trim();
}
}
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment