Skip to content
Snippets Groups Projects
Commit 11338eea authored by anttil's avatar anttil
Browse files

BIS-487 / SP-813: Refactoring

SVN: 29664
parent af4a26d8
No related branches found
No related tags found
No related merge requests found
Showing
with 323 additions and 191 deletions
...@@ -282,7 +282,7 @@ case "$command" in ...@@ -282,7 +282,7 @@ case "$command" in
;; ;;
verify-archives) verify-archives)
shift shift
java -cp lib/datastore_server.jar:lib/commons-lang.jar:lib/commons-io.jar:lib/postgresql.jar ch.systemsx.cisd.openbis.dss.archiveverifier.cli.Main etc/service.properties $* java -cp lib/* ch.systemsx.cisd.openbis.dss.archiveverifier.cli.Main etc/service.properties $*
;; ;;
log-thread-dump) log-thread-dump)
if [ -f $PIDFILE ]; then if [ -f $PIDFILE ]; then
......
/*
* Copyright 2013 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import java.io.PrintStream;
/**
 * Verification result reporting a configuration error that prevented the batch from running.
 *
 * @author anttil
 */
public class ConfigurationFailure implements IResult
{
    // Human-readable description of the configuration problem.
    private final String errorMessage;

    /**
     * @param error description of the configuration problem that stopped verification
     */
    public ConfigurationFailure(String error)
    {
        this.errorMessage = error;
    }

    /**
     * Writes the stored error message to the given stream. The data set code is ignored,
     * because a configuration failure is not tied to any particular data set.
     */
    @Override
    public void printTo(String dataSet, PrintStream out)
    {
        out.println(errorMessage);
    }

    /** A configuration failure aborts the whole run, hence {@link ResultType#FATAL}. */
    @Override
    public ResultType getType()
    {
        return ResultType.FATAL;
    }
}
...@@ -46,7 +46,7 @@ public class DataSetArchiveVerifier implements IDataSetArchiveVerifier ...@@ -46,7 +46,7 @@ public class DataSetArchiveVerifier implements IDataSetArchiveVerifier
return errors.isEmpty() ? new SuccessResult(file) : new FailedResult(file, errors); return errors.isEmpty() ? new SuccessResult(file) : new FailedResult(file, errors);
} else } else
{ {
return new FailedResult(); return new SkippedResult();
} }
} }
} }
...@@ -17,8 +17,7 @@ ...@@ -17,8 +17,7 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch; package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.io.PrintStream;
import java.util.Arrays;
import java.util.List; import java.util.List;
/** /**
...@@ -33,16 +32,6 @@ public class FailedResult implements IResult ...@@ -33,16 +32,6 @@ public class FailedResult implements IResult
private final List<String> errors; private final List<String> errors;
public FailedResult()
{
this(null, new ArrayList<String>());
}
public FailedResult(String error)
{
this(null, Arrays.asList(error));
}
public FailedResult(File file, List<String> errors) public FailedResult(File file, List<String> errors)
{ {
this.file = file; this.file = file;
...@@ -50,33 +39,19 @@ public class FailedResult implements IResult ...@@ -50,33 +39,19 @@ public class FailedResult implements IResult
} }
@Override @Override
public boolean success() public void printTo(String dataSet, PrintStream out)
{ {
return false; out.println("FAILED - " + dataSet + " (" + file + ")");
} for (String error : errors)
@Override
public String getFile()
{
if (this.file != null)
{ {
return file.getAbsolutePath(); out.println(" " + error);
} else
{
return null;
} }
} }
@Override @Override
public List<String> getErrors() public ResultType getType()
{
return this.errors;
}
@Override
public String toString()
{ {
return "FailedResult: " + file + ": " + errors; return ResultType.FAILED;
} }
} }
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch; package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import java.util.List; import java.io.PrintStream;
/** /**
* Result of verification of a dataset archive file. * Result of verification of a dataset archive file.
...@@ -25,9 +25,7 @@ import java.util.List; ...@@ -25,9 +25,7 @@ import java.util.List;
*/ */
public interface IResult public interface IResult
{ {
boolean success(); public void printTo(String dataSet, PrintStream out);
String getFile(); public ResultType getType();
List<String> getErrors();
} }
/*
* Copyright 2013 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
/**
 * Classification of the outcome of verifying one data set archive.
 *
 * @author anttil
 */
public enum ResultType
{
    /** Archive file was found and all checks passed (see SuccessResult). */
    OK,
    /** Archive file was found but at least one verifier reported errors (see FailedResult). */
    FAILED,
    /** Archive file could not be located, so no checks were run (see SkippedResult). */
    SKIPPED,
    /** Configuration problem; the whole batch could not proceed (see ConfigurationFailure). */
    FATAL;
}
/*
* Copyright 2013 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import java.io.PrintStream;
/**
 * Verification result for a data set whose archive file could not be located;
 * no checks were performed for it.
 *
 * @author anttil
 */
public class SkippedResult implements IResult
{
    /**
     * Prints a "NOT TESTED" line for the given data set. Since no archive file was
     * found, there are no per-file details to report.
     */
    @Override
    public void printTo(String dataSet, PrintStream out)
    {
        final String line = "NOT TESTED - " + dataSet + " (file not found)";
        out.println(line);
    }

    /** A missing archive file means the data set was skipped, not failed. */
    @Override
    public ResultType getType()
    {
        return ResultType.SKIPPED;
    }
}
...@@ -17,8 +17,7 @@ ...@@ -17,8 +17,7 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch; package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.io.PrintStream;
import java.util.List;
/** /**
* Result of successfull verification * Result of successfull verification
...@@ -36,26 +35,15 @@ public class SuccessResult implements IResult ...@@ -36,26 +35,15 @@ public class SuccessResult implements IResult
} }
@Override @Override
public boolean success() public void printTo(String dataSet, PrintStream out)
{ {
return true; out.println("OK - " + dataSet + " (" + file + ")");
} }
@Override @Override
public String getFile() public ResultType getType()
{ {
return file.getAbsolutePath(); return ResultType.OK;
} }
@Override
public List<String> getErrors()
{
return new ArrayList<String>();
}
@Override
public String toString()
{
return "SuccessResult: " + file;
}
} }
...@@ -18,6 +18,7 @@ package ch.systemsx.cisd.openbis.dss.archiveverifier.cli; ...@@ -18,6 +18,7 @@ package ch.systemsx.cisd.openbis.dss.archiveverifier.cli;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader; import java.io.FileReader;
import java.io.IOException; import java.io.IOException;
import java.sql.Connection; import java.sql.Connection;
...@@ -30,9 +31,10 @@ import java.util.Properties; ...@@ -30,9 +31,10 @@ import java.util.Properties;
import java.util.SortedMap; import java.util.SortedMap;
import java.util.TreeMap; import java.util.TreeMap;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.ConfigurationFailure;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.DataSetArchiveVerificationBatch; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.DataSetArchiveVerificationBatch;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.DataSetArchiveVerifier; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.DataSetArchiveVerifier;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.FailedResult; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IArchiveFileRepository;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IArchiveFileVerifier; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IArchiveFileVerifier;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IDataSetArchiveVerificationBatch; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IDataSetArchiveVerificationBatch;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IResult; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IResult;
...@@ -46,7 +48,6 @@ import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.CrcEnabled; ...@@ -46,7 +48,6 @@ import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.CrcEnabled;
import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.IArchiveFileMetaDataRepository; import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.IArchiveFileMetaDataRepository;
import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.ZipFileHeaderVerifier; import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.ZipFileHeaderVerifier;
import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.ZipFileIntegrityVerifier; import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.ZipFileIntegrityVerifier;
import ch.systemsx.cisd.openbis.dss.generic.shared.utils.DssPropertyParametersUtil;
/** /**
* Creates a DataSetArchiveVerificationBatch based on command line arguments. First argument should point to service.properties of the local DSS. Rest * Creates a DataSetArchiveVerificationBatch based on command line arguments. First argument should point to service.properties of the local DSS. Rest
...@@ -57,27 +58,10 @@ import ch.systemsx.cisd.openbis.dss.generic.shared.utils.DssPropertyParametersUt ...@@ -57,27 +58,10 @@ import ch.systemsx.cisd.openbis.dss.generic.shared.utils.DssPropertyParametersUt
public class DataSetArchiveVerificationBatchFactory public class DataSetArchiveVerificationBatchFactory
{ {
private static final String DEFAULT_ARCHIVE_FOLDER = "archiver.default-archive-folder";
private static final String MAPPING_FILE = "archiver.mapping-file";
private static final String SHARDING = "archiver.with-sharding";
private static final String DATABASE_NAME = "path-info-db.basicDatabaseName";
private static final String DATABASE_KIND = "path-info-db.databaseKind";
private static final String DATABASE_USER = "path-info-db.owner";
private static final String DATABASE_PASSWORD = "path-info-db.password";
private static final String PATHINFO_CHECKSUMS_COMPUTED = "post-registration.pathinfo-feeding.compute-checksum";
private final String[] args; private final String[] args;
/** private String servicePropertiesPath;
* @param args
*/
public DataSetArchiveVerificationBatchFactory(String[] args) public DataSetArchiveVerificationBatchFactory(String[] args)
{ {
this.args = args; this.args = args;
...@@ -85,72 +69,78 @@ public class DataSetArchiveVerificationBatchFactory ...@@ -85,72 +69,78 @@ public class DataSetArchiveVerificationBatchFactory
public IDataSetArchiveVerificationBatch build() public IDataSetArchiveVerificationBatch build()
{ {
if (args.length < 2) try
{ {
return error("Usage: datastore_server.sh verify-archives [dataset code 1] [dataset code 2] ..."); if (args.length < 2)
} {
throw new ConfigurationException("Usage: datastore_server.sh verify-archives <dataset_code_1> <dataset code 2> ...");
}
String servicePropertiesPath = args[0]; Properties properties = readServiceProperties(args[0]);
File defaultArchiveDirectory = getDefaultArchiveDirectory(properties);
IArchiveFileMetaDataRepository pathInfoRepository = getPathInfoRepository(properties);
List<File> archiveDirectories = getArchiveDirectories(properties, defaultArchiveDirectory);
IArchiveFileRepository archiveFileRepository = getArchiveFileRepository(properties, archiveDirectories);
List<IArchiveFileVerifier> verifiers = getVerifiers(properties, pathInfoRepository);
DataSetArchiveVerifier verifier = new DataSetArchiveVerifier(archiveFileRepository, new CompositeVerifier(verifiers));
File serviceProperties = new File(servicePropertiesPath); return new DataSetArchiveVerificationBatch(verifier, Arrays.copyOfRange(args, 1, args.length));
if (serviceProperties.exists() == false)
{
return error("File " + serviceProperties.getAbsolutePath() + " does not exist");
}
Properties properties = DssPropertyParametersUtil.loadProperties(serviceProperties.getAbsolutePath()); } catch (ConfigurationException e)
String defaultArchiveDirectoryPath = properties.getProperty(DEFAULT_ARCHIVE_FOLDER);
if (defaultArchiveDirectoryPath == null)
{ {
return error("Given service.properties file does not contain mandatory property " + DEFAULT_ARCHIVE_FOLDER); final String error = e.getMessage();
return new IDataSetArchiveVerificationBatch()
{
@Override
public SortedMap<String, IResult> run()
{
SortedMap<String, IResult> result = new TreeMap<String, IResult>();
result.put("Failed to start", new ConfigurationFailure(error));
return result;
}
};
} }
}
IArchiveFileMetaDataRepository pathInfoRepository = null; private List<IArchiveFileVerifier> getVerifiers(Properties properties, IArchiveFileMetaDataRepository pathInfoRepository)
{
String databaseName = properties.getProperty(DATABASE_NAME); List<IArchiveFileVerifier> verifiers = new ArrayList<IArchiveFileVerifier>();
String databaseKind = properties.getProperty(DATABASE_KIND); verifiers.add(new ZipFileIntegrityVerifier());
if (databaseName != null && databaseKind != null) if (pathInfoRepository != null)
{ {
String user = properties.getProperty(DATABASE_USER); verifiers.add(new ZipFileHeaderVerifier(pathInfoRepository,
String password = properties.getProperty(DATABASE_PASSWORD); "true".equalsIgnoreCase(properties.getProperty(PATHINFO_CHECKSUMS_COMPUTED)) ? CrcEnabled.TRUE : CrcEnabled.FALSE));
if (user == null || user.isEmpty())
{
user = "postgres";
}
if (password == null)
{
password = "";
}
String url = "jdbc:postgresql://localhost/" + databaseName + "_" + databaseKind;
try
{
Connection connection = DriverManager.getConnection(url, user, password);
pathInfoRepository = new JdbcPathInfoRepository(connection);
} catch (SQLException ex)
{
return error("Could not connect to pathinfo db at " + url + " with user name '" + user + "', password '" + password + "'");
}
} }
return verifiers;
}
File defaultArchiveDirectory = new File(defaultArchiveDirectoryPath); private FileSystemArchiveFileRepository getArchiveFileRepository(Properties properties, List<File> archiveDirectories)
if (defaultArchiveDirectory.exists() == false) {
boolean sharding = "true".equalsIgnoreCase(properties.getProperty(SHARDING));
IFileLocator fileLocator;
if (sharding)
{
fileLocator = new ShardingFileLocator();
} else
{ {
return error("Default archive directory " + defaultArchiveDirectoryPath + " does not exist"); fileLocator = new FlatFileLocator();
} }
String mappingFilePath = properties.getProperty(MAPPING_FILE); FileSystemArchiveFileRepository fileFinder = new FileSystemArchiveFileRepository(archiveDirectories, fileLocator);
return fileFinder;
}
private List<File> getArchiveDirectories(Properties properties, File defaultArchiveDirectory) throws ConfigurationException
{
List<File> mappedArchiveDirectories = new ArrayList<File>(); List<File> mappedArchiveDirectories = new ArrayList<File>();
String mappingFilePath = properties.getProperty(MAPPING_FILE);
if (mappingFilePath != null) if (mappingFilePath != null)
{ {
File mappingFile = new File(mappingFilePath); File mappingFile = new File(mappingFilePath);
if (mappingFile.exists() == false) if (mappingFile.exists() == false)
{ {
return error("Mapping file " + mappingFilePath + " defined in " + servicePropertiesPath + " does not exist"); throw new ConfigurationException("Mapping file " + mappingFilePath + " does not exist");
} }
try try
...@@ -169,7 +159,8 @@ public class DataSetArchiveVerificationBatchFactory ...@@ -169,7 +159,8 @@ public class DataSetArchiveVerificationBatchFactory
File archiveDirectory = new File(archiveDirectoryPath); File archiveDirectory = new File(archiveDirectoryPath);
if (archiveDirectory.exists() == false) if (archiveDirectory.exists() == false)
{ {
return error("Archive directory " + archiveDirectoryPath + " specified in mapping file " + mappingFilePath throw new ConfigurationException("Archive directory " + archiveDirectoryPath + " specified in mapping file "
+ mappingFilePath
+ " does not exist"); + " does not exist");
} }
mappedArchiveDirectories.add(archiveDirectory); mappedArchiveDirectories.add(archiveDirectory);
...@@ -177,53 +168,121 @@ public class DataSetArchiveVerificationBatchFactory ...@@ -177,53 +168,121 @@ public class DataSetArchiveVerificationBatchFactory
br.close(); br.close();
} catch (IOException e) } catch (IOException e)
{ {
return error("I/O error: " + e.getMessage()); throw new ConfigurationException("I/O error: " + e.getMessage());
} }
} }
List<File> archiveDirectories = new ArrayList<File>(); List<File> archiveDirectories = new ArrayList<File>();
archiveDirectories.add(defaultArchiveDirectory); archiveDirectories.add(defaultArchiveDirectory);
archiveDirectories.addAll(mappedArchiveDirectories); archiveDirectories.addAll(mappedArchiveDirectories);
return archiveDirectories;
}
boolean sharding = "true".equalsIgnoreCase(properties.getProperty(SHARDING)); private IArchiveFileMetaDataRepository getPathInfoRepository(Properties properties) throws ConfigurationException
IFileLocator fileLocator; {
if (sharding) IArchiveFileMetaDataRepository pathInfoRepository = null;
String databaseName = properties.getProperty(DATABASE_NAME);
String databaseKind = properties.getProperty(DATABASE_KIND);
if (databaseName != null && databaseKind != null)
{ {
fileLocator = new ShardingFileLocator(); String user = properties.getProperty(DATABASE_USER);
} else String password = properties.getProperty(DATABASE_PASSWORD);
if (user == null || user.isEmpty())
{
user = "postgres";
}
if (password == null)
{
password = "";
}
String url = "jdbc:postgresql://localhost/" + databaseName + "_" + databaseKind;
try
{
Connection connection = DriverManager.getConnection(url, user, password);
pathInfoRepository = new JdbcPathInfoRepository(connection);
} catch (SQLException ex)
{
throw new ConfigurationException("Could not connect to pathinfo db at " + url + " with user name '" + user + "', password '"
+ password + "'");
}
}
return pathInfoRepository;
}
private File getDefaultArchiveDirectory(Properties properties) throws ConfigurationException
{
String defaultArchiveDirectoryPath = properties.getProperty(DEFAULT_ARCHIVE_FOLDER);
if (defaultArchiveDirectoryPath == null)
{ {
fileLocator = new FlatFileLocator(); throw new ConfigurationException(servicePropertiesPath + " does not contain mandatory property " + DEFAULT_ARCHIVE_FOLDER);
} }
FileSystemArchiveFileRepository fileFinder = new FileSystemArchiveFileRepository(archiveDirectories, fileLocator); File defaultArchiveDirectory = new File(defaultArchiveDirectoryPath);
if (defaultArchiveDirectory.exists() == false)
{
throw new ConfigurationException("Default archive directory " + defaultArchiveDirectoryPath + " does not exist");
}
return defaultArchiveDirectory;
}
List<IArchiveFileVerifier> verifiers = new ArrayList<IArchiveFileVerifier>(); private Properties readServiceProperties(String path) throws ConfigurationException
verifiers.add(new ZipFileIntegrityVerifier()); {
File serviceProperties = new File(path);
servicePropertiesPath = serviceProperties.getAbsolutePath();
if (pathInfoRepository != null) if (serviceProperties.exists() == false)
{ {
verifiers.add(new ZipFileHeaderVerifier(pathInfoRepository, throw new ConfigurationException("File " + servicePropertiesPath + " does not exist");
"true".equalsIgnoreCase(properties.getProperty(PATHINFO_CHECKSUMS_COMPUTED)) ? CrcEnabled.TRUE : CrcEnabled.FALSE));
} }
DataSetArchiveVerifier verifier = new DataSetArchiveVerifier( Properties properties = new Properties();
fileFinder, try
new CompositeVerifier(verifiers)); {
properties.load(new FileInputStream(serviceProperties));
return new DataSetArchiveVerificationBatch(verifier, Arrays.copyOfRange(args, 1, args.length)); } catch (IOException ex)
{
throw new ConfigurationException("Could not read " + servicePropertiesPath + ": " + ex.getMessage());
}
return properties;
} }
private IDataSetArchiveVerificationBatch error(final String error) private class ConfigurationException extends Exception
{ {
return new IDataSetArchiveVerificationBatch() private static final long serialVersionUID = 1L;
{
@Override private final String message;
public SortedMap<String, IResult> run()
{ public ConfigurationException(String message)
SortedMap<String, IResult> result = new TreeMap<String, IResult>(); {
result.put("Failed to start", new FailedResult(error)); this.message = message;
return result; }
}
}; @Override
public String getMessage()
{
return message;
}
} }
private static final String DEFAULT_ARCHIVE_FOLDER = "archiver.default-archive-folder";
private static final String MAPPING_FILE = "archiver.mapping-file";
private static final String SHARDING = "archiver.with-sharding";
private static final String DATABASE_NAME = "path-info-db.basicDatabaseName";
private static final String DATABASE_KIND = "path-info-db.databaseKind";
private static final String DATABASE_USER = "path-info-db.owner";
private static final String DATABASE_PASSWORD = "path-info-db.password";
private static final String PATHINFO_CHECKSUMS_COMPUTED = "post-registration.pathinfo-feeding.compute-checksum";
} }
...@@ -17,9 +17,11 @@ ...@@ -17,9 +17,11 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.cli; package ch.systemsx.cisd.openbis.dss.archiveverifier.cli;
import java.io.PrintStream; import java.io.PrintStream;
import java.util.EnumMap;
import java.util.Map; import java.util.Map;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IResult; import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.IResult;
import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.ResultType;
/** /**
* Prints the results of a dataset archive batch verification. * Prints the results of a dataset archive batch verification.
...@@ -38,43 +40,46 @@ public class ResultPrinter ...@@ -38,43 +40,46 @@ public class ResultPrinter
public void print(Map<String, IResult> results) public void print(Map<String, IResult> results)
{ {
int ok = 0; Map<ResultType, Integer> counts = createCountMap();
int failed = 0;
int notTested = 0;
for (String dataSet : results.keySet()) for (String dataSet : results.keySet())
{ {
IResult result = results.get(dataSet); IResult result = results.get(dataSet);
boolean success = result.success(); result.printTo(dataSet, out);
String file = result.getFile(); ResultType type = result.getType();
counts.put(type, counts.get(type) + 1);
}
printTotals(counts);
}
private Map<ResultType, Integer> createCountMap()
{
Map<ResultType, Integer> counts = new EnumMap<ResultType, Integer>(ResultType.class);
for (ResultType type : ResultType.values())
{
counts.put(type, 0);
}
return counts;
}
private void printTotals(Map<ResultType, Integer> counts)
{
int ok = counts.get(ResultType.OK);
int failed = counts.get(ResultType.FAILED);
int notTested = counts.get(ResultType.SKIPPED);
int fatal = counts.get(ResultType.FATAL);
int total = ok + failed;
if (success) if (fatal > 0)
{ {
out.println("OK - " + dataSet + " (" + file + ")"); return;
ok++;
} else if (file != null)
{
out.println("FAILED - " + dataSet + " (" + file + ")");
for (String error : result.getErrors())
{
out.println(" " + error);
}
failed++;
} else if (result.getErrors().isEmpty())
{
out.println("NOT TESTED - " + dataSet + " (file not found)");
notTested++;
} else
{
out.println(result.getErrors().get(0));
return;
}
} }
out.println(); out.println();
out.println("---"); out.println("---");
int total = ok + failed;
out.println("Total of " + total + " dataset archives tested."); out.println("Total of " + total + " dataset archives tested.");
if (failed == 0) if (failed == 0)
{ {
out.println("No errors found"); out.println("No errors found");
......
...@@ -48,5 +48,4 @@ public class DataSetPathInfo implements IArchiveFileContent ...@@ -48,5 +48,4 @@ public class DataSetPathInfo implements IArchiveFileContent
PathInfoEntry entry = data.get(file); PathInfoEntry entry = data.get(file);
return entry != null ? entry.getSize() : null; return entry != null ? entry.getSize() : null;
} }
} }
...@@ -34,7 +34,7 @@ import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.IArchiveFileMetaDat ...@@ -34,7 +34,7 @@ import ch.systemsx.cisd.openbis.dss.archiveverifier.verifier.IArchiveFileMetaDat
public class JdbcPathInfoRepository implements IArchiveFileMetaDataRepository public class JdbcPathInfoRepository implements IArchiveFileMetaDataRepository
{ {
private static final String QUERY_PATHINFO = private static final String QUERY_PATHINFO =
"SELECT ds.location, dsf.relative_path, dsf.file_name, dsf.size_in_bytes, dsf.checksum_crc32, dsf.is_directory, dsf.last_modified " + "SELECT dsf.relative_path, dsf.file_name, dsf.size_in_bytes, dsf.checksum_crc32, dsf.is_directory, dsf.last_modified " +
"FROM data_sets ds, data_set_files dsf " + "FROM data_sets ds, data_set_files dsf " +
"WHERE dsf.dase_id = ds.id AND ds.code=?"; "WHERE dsf.dase_id = ds.id AND ds.code=?";
......
...@@ -17,6 +17,8 @@ ...@@ -17,6 +17,8 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.verifier; package ch.systemsx.cisd.openbis.dss.archiveverifier.verifier;
/** /**
* CRC check status.
*
* @author anttil * @author anttil
*/ */
public enum CrcEnabled public enum CrcEnabled
......
...@@ -17,7 +17,6 @@ ...@@ -17,7 +17,6 @@
package ch.systemsx.cisd.openbis.dss.archiveverifier.batch; package ch.systemsx.cisd.openbis.dss.archiveverifier.batch;
import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import java.io.File; import java.io.File;
...@@ -35,26 +34,21 @@ public class DataSetArchiveVerifierTest ...@@ -35,26 +34,21 @@ public class DataSetArchiveVerifierTest
public void successfulVerificationCausesSuccessResult() throws Exception public void successfulVerificationCausesSuccessResult() throws Exception
{ {
IResult result = verifier.run(CODE_OF_DATASET_WITH_GOOD_ARCHIVE); IResult result = verifier.run(CODE_OF_DATASET_WITH_GOOD_ARCHIVE);
assertThat(result.success(), is(true)); assertThat(result.getType(), is(ResultType.OK));
assertThat(result.getFile(), is(GOOD_ARCHIVE_FILE.getAbsolutePath()));
} }
@Test @Test
public void failingVerificationCausesFailedResult() throws Exception public void failingVerificationCausesFailedResult() throws Exception
{ {
IResult result = verifier.run(CODE_OF_DATASET_WITH_BAD_ARCHIVE); IResult result = verifier.run(CODE_OF_DATASET_WITH_BAD_ARCHIVE);
assertThat(result.success(), is(false)); assertThat(result.getType(), is(ResultType.FAILED));
assertThat(result.getFile(), is(BAD_ARCHIVE_FILE.getAbsolutePath()));
assertThat(result.getErrors().isEmpty(), is(false));
} }
@Test @Test
public void failureToLocateArchiveFileCausesFailedResult() throws Exception public void failureToLocateArchiveFileCausesFailedResult() throws Exception
{ {
IResult result = verifier.run(CODE_OF_DATASET_WITHOUT_AN_ARCHIVE_FILE); IResult result = verifier.run(CODE_OF_DATASET_WITHOUT_AN_ARCHIVE_FILE);
assertThat(result.success(), is(false)); assertThat(result.getType(), is(ResultType.SKIPPED));
assertThat(result.getFile(), is(nullValue()));
assertThat(result.getErrors().isEmpty(), is(true));
} }
@BeforeMethod @BeforeMethod
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment