Commit d53a95cb authored by brinn

add: support for the H2 database

SVN: 3322
parent 17132450
@@ -18,5 +18,6 @@
     <classpathentry kind="lib" path="/libraries/postgresql/postgresql.jar" sourcepath="/libraries/postgresql/src.zip"/>
     <classpathentry kind="lib" path="/libraries/commons-io/commons-io.jar" sourcepath="/libraries/commons-io/src.zip"/>
     <classpathentry kind="lib" path="/libraries/commons-logging/commons-logging.jar" sourcepath="/libraries/commons-logging/src.zip"/>
+    <classpathentry kind="lib" path="/libraries/h2/h2.jar" sourcepath="/libraries/h2/h2-src.zip"/>
     <classpathentry kind="output" path="targets/classes"/>
 </classpath>
@@ -28,6 +28,7 @@ import ch.systemsx.cisd.common.db.ISequencerHandler;
 import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
 import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
 import ch.systemsx.cisd.common.db.PostgreSQLSequencerHandler;
+import ch.systemsx.cisd.dbmigration.h2.H2DAOFactory;
 import ch.systemsx.cisd.dbmigration.postgresql.PostgreSQLDAOFactory;
 
 /**
@@ -39,11 +40,11 @@ public enum DatabaseEngine
 {
     POSTGRESQL("postgresql", org.postgresql.Driver.class, PostgreSQLDAOFactory.class, new DefaultLobHandler(),
             new PostgreSQLSequencerHandler(), "jdbc:postgresql:{0}", "jdbc:postgresql:{0}{1}", "//localhost/",
-            "postgres");
+            "postgres"),
-    // H2("h2", org.h2.Driver.class, H2DAOFactory.class, new DefaultLobHandler(),
-    //         new PostgreSQLSequencerHandler(), "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1",
-    //         "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1", "mem:", "");
+    H2("h2", org.h2.Driver.class, H2DAOFactory.class, new DefaultLobHandler(),
+            new PostgreSQLSequencerHandler(), "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1",
+            "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1", "mem:", "");
 
     private static Map<String, DatabaseEngine> engines = initEngineMap();
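
For reference, the two URL templates of the new H2 entry expand to in-memory JDBC URLs. A minimal sketch of the expansion, assuming the templates are filled in with java.text.MessageFormat (which the {0}/{1} placeholders suggest; the expansion code itself is not part of this commit):

import java.text.MessageFormat;

public class H2UrlExample
{
    public static void main(String[] args)
    {
        // {0} is the host part ("mem:" for H2), {1} the database name.
        final String url =
                MessageFormat.format("jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1", "mem:", "test_db");
        // Prints 'jdbc:h2:mem:test_db;DB_CLOSE_DELAY=-1'. DB_CLOSE_DELAY=-1 keeps the
        // in-memory database alive until the JVM exits instead of dropping it when
        // the last connection closes.
        System.out.println(url);
    }
}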
/*
* Copyright 2008 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.dbmigration.h2;
import java.io.File;
import java.io.FilenameFilter;
import java.sql.SQLException;
import java.util.Arrays;
import javax.sql.DataSource;
import org.apache.log4j.Logger;
import org.h2.tools.DeleteDbFiles;
import org.springframework.jdbc.core.simple.SimpleJdbcDaoSupport;
import ch.systemsx.cisd.common.Script;
import ch.systemsx.cisd.common.db.ISqlScriptExecutor;
import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
import ch.systemsx.cisd.common.logging.LogCategory;
import ch.systemsx.cisd.common.logging.LogFactory;
import ch.systemsx.cisd.common.utilities.FileUtilities;
import ch.systemsx.cisd.dbmigration.DatabaseVersionLogDAO;
import ch.systemsx.cisd.dbmigration.IDatabaseAdminDAO;
import ch.systemsx.cisd.dbmigration.IMassUploader;
import ch.systemsx.cisd.dbmigration.MassUploadFileType;
/**
* Implementation of {@link IDatabaseAdminDAO} for H2.
*
* @author Bernd Rinn
*/
public class H2AdminDAO extends SimpleJdbcDaoSupport implements IDatabaseAdminDAO
{
private static final String DROP_ALL_OBJECTS_SQL = "drop all objects;";
private static final String SQL_FILE_TYPE = ".sql";
private static final String CREATE_TABLE_DATABASE_VERSION_LOGS_SQL =
"create table " + DatabaseVersionLogDAO.DB_VERSION_LOG + " (db_version varchar(4) not null, "
+ "module_name varchar(250), run_status varchar(10), run_status_timestamp timestamp, "
+ "module_code bytea, run_exception bytea);";
private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION, H2AdminDAO.class);
private final String databaseName;
private final ISqlScriptExecutor scriptExecutor;
private final IMassUploader massUploader;
/**
* Creates an instance.
*
* @param dataSource Data source able to create/drop the specified database.
* @param scriptExecutor An executor of SQL scripts within the new database.
* @param massUploader A class that can perform mass (batch) uploads into database tables.
* @param databaseName Name of the database.
*/
public H2AdminDAO(DataSource dataSource, ISqlScriptExecutor scriptExecutor, IMassUploader massUploader, String databaseName)
{
this.scriptExecutor = scriptExecutor;
this.massUploader = massUploader;
this.databaseName = databaseName;
setDataSource(dataSource);
}
public String getDatabaseName()
{
return databaseName;
}
public void createOwner()
{
// Creation of the user happens "on the fly" with H2
}
public void createDatabase()
{
// Creation of databases happens "on the fly" with H2; we only need to create the database_version_logs table
createDatabaseVersionLogsTable();
}
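    // Illustration only (not used by this class): an H2 database springs into
    // existence on first connection, e.g.
    //     DriverManager.getConnection("jdbc:h2:mem:test_db;DB_CLOSE_DELAY=-1", "sa", "");
    // implicitly creates the in-memory database 'test_db' ("sa" with an empty
    // password being H2's default credentials).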
private void createDatabaseVersionLogsTable()
{
try
{
scriptExecutor.execute(new Script("create database_version_logs table",
CREATE_TABLE_DATABASE_VERSION_LOGS_SQL), true, null);
} catch (RuntimeException ex)
{
operationLog.error("Failed to create database_version_logs table.", ex);
throw ex;
}
}
public void dropDatabase()
{
scriptExecutor.execute(new Script("drop database", DROP_ALL_OBJECTS_SQL), true, null);
try
{
DeleteDbFiles.execute("targets", databaseName, true);
} catch (SQLException ex)
{
throw new CheckedExceptionTunnel(ex);
}
}
public void restoreDatabaseFromDump(File dumpFolder, String version)
{
createDatabaseVersionLogsTable();
final Script schemaScript = tryLoadScript(dumpFolder, "schema", version);
scriptExecutor.execute(schemaScript, true, null);
final File[] massUploadFiles = getMassUploadFiles(dumpFolder);
massUploader.performMassUpload(massUploadFiles);
final Script finishScript = tryLoadScript(dumpFolder, "finish", version);
scriptExecutor.execute(finishScript, true, null);
}
    private Script tryLoadScript(final File dumpFolder, String prefix, String version)
            throws ConfigurationFailureException
    {
        final File scriptFile = new File(dumpFolder, prefix + "-" + version + SQL_FILE_TYPE);
        // The script file may be missing; fail early with a configuration error.
        if (scriptFile.canRead() == false)
        {
            final String message = "No " + prefix + " script found for version " + version;
            operationLog.error(message);
            throw new ConfigurationFailureException(message);
        }
        return new Script(scriptFile.getPath(), FileUtilities.loadToString(scriptFile), version);
    }
/**
* Returns the files determined for mass uploading.
*/
private File[] getMassUploadFiles(File dumpFolder)
{
if (operationLog.isDebugEnabled())
{
operationLog.debug("Searching for mass upload files in directory '" + dumpFolder.getAbsolutePath() + "'.");
}
String[] csvFiles = dumpFolder.list(new FilenameFilter()
{
public boolean accept(File dir, String name)
{
return MassUploadFileType.CSV.isOfType(name) || MassUploadFileType.TSV.isOfType(name);
}
});
if (csvFiles == null)
{
operationLog.warn("Path '" + dumpFolder.getAbsolutePath() + "' is not a directory.");
return new File[0];
}
Arrays.sort(csvFiles);
if (operationLog.isInfoEnabled())
{
operationLog.info("Found " + csvFiles.length + " files for mass uploading.");
}
final File[] csvPaths = new File[csvFiles.length];
for (int i = 0; i < csvFiles.length; ++i)
{
csvPaths[i] = new File(dumpFolder, csvFiles[i]);
}
return csvPaths;
}
}
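
Taken together, restoreDatabaseFromDump expects a dump folder with a schema-<version>.sql script, a set of *.csv/*.tsv mass upload files, and a finish-<version>.sql script. A hypothetical usage sketch: the folder name and version are made up, and dataSource, scriptExecutor, and massUploader stand for the collaborators normally wired up by H2DAOFactory:

final IDatabaseAdminDAO admin =
        new H2AdminDAO(dataSource, scriptExecutor, massUploader, "test_db");
admin.createDatabase(); // with H2 this only creates the database_version_logs table
// Runs schema-023.sql, mass-uploads all CSV/TSV files in alphabetical order,
// then runs finish-023.sql.
admin.restoreDatabaseFromDump(new File("sourceTest/sql/test_db"), "023");
admin.dropDatabase(); // drops all objects and deletes the H2 files below targets/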
/*
* Copyright 2008 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.dbmigration.h2;
import java.sql.SQLException;
import javax.sql.DataSource;
import ch.systemsx.cisd.common.db.ISqlScriptExecutor;
import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
import ch.systemsx.cisd.dbmigration.DatabaseConfigurationContext;
import ch.systemsx.cisd.dbmigration.DatabaseVersionLogDAO;
import ch.systemsx.cisd.dbmigration.IDAOFactory;
import ch.systemsx.cisd.dbmigration.IDatabaseAdminDAO;
import ch.systemsx.cisd.dbmigration.IDatabaseVersionLogDAO;
import ch.systemsx.cisd.dbmigration.IMassUploader;
import ch.systemsx.cisd.dbmigration.SqlScriptExecutor;
/**
* Implementation of {@link IDAOFactory} for H2.
*
* @author Bernd Rinn
*/
public class H2DAOFactory implements IDAOFactory
{
private final IDatabaseAdminDAO databaseDAO;
private final ISqlScriptExecutor sqlScriptExecutor;
private final IDatabaseVersionLogDAO databaseVersionLogDAO;
private final IMassUploader massUploader;
/**
* Creates an instance based on the specified configuration context.
*/
public H2DAOFactory(DatabaseConfigurationContext context)
{
final DataSource dataSource = context.getDataSource();
sqlScriptExecutor = new SqlScriptExecutor(dataSource, context.isScriptSingleStepMode());
databaseVersionLogDAO = new DatabaseVersionLogDAO(dataSource, context.getLobHandler());
try
{
massUploader = new H2MassUploader(dataSource, context.getSequenceNameMapper());
} catch (SQLException ex)
{
throw new CheckedExceptionTunnel(ex);
}
databaseDAO =
new H2AdminDAO(context.getAdminDataSource(), sqlScriptExecutor, massUploader, context.getDatabaseName());
}
public IDatabaseAdminDAO getDatabaseDAO()
{
return databaseDAO;
}
public ISqlScriptExecutor getSqlScriptExecutor()
{
return sqlScriptExecutor;
}
public IDatabaseVersionLogDAO getDatabaseVersionLogDAO()
{
return databaseVersionLogDAO;
}
public IMassUploader getMassUploader()
{
return massUploader;
}
}
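
A minimal sketch of how the factory might be obtained from a configuration context. The setter names on DatabaseConfigurationContext are assumptions made for illustration and may not match the actual API:

final DatabaseConfigurationContext context = new DatabaseConfigurationContext();
context.setDatabaseEngineCode("h2"); // assumed setter name
context.setDatabaseKind("test");     // assumed setter name
final IDAOFactory factory = new H2DAOFactory(context);
factory.getDatabaseDAO().createDatabase();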
/*
* Copyright 2008 ETH Zuerich, CISD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.systemsx.cisd.dbmigration.h2;
import static ch.systemsx.cisd.dbmigration.MassUploadFileType.TSV;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
import javax.sql.DataSource;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.UncategorizedSQLException;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.simple.SimpleJdbcDaoSupport;
import ch.systemsx.cisd.common.db.ISequenceNameMapper;
import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
import ch.systemsx.cisd.common.logging.LogCategory;
import ch.systemsx.cisd.common.logging.LogFactory;
import ch.systemsx.cisd.dbmigration.IMassUploader;
/**
* A {@link IMassUploader} for the H2 database.
*
* @author Bernd Rinn
*/
public class H2MassUploader extends SimpleJdbcDaoSupport implements IMassUploader
{
private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION, H2MassUploader.class);
private final ISequenceNameMapper sequenceNameMapper;
/**
* Creates an instance for the specified data source and sequence mapper.
*/
public H2MassUploader(DataSource dataSource, ISequenceNameMapper sequenceNameMapper) throws SQLException
{
this.sequenceNameMapper = sequenceNameMapper;
setDataSource(dataSource);
}
private final class MassUploadRecord
{
final File massUploadFile;
final String tableName;
final BitSet isBinaryColumn;
MassUploadRecord(File massUploadFile, String tableName, BitSet isBinaryColumn)
{
this.massUploadFile = massUploadFile;
this.tableName = tableName;
this.isBinaryColumn = isBinaryColumn;
}
}
public void performMassUpload(File[] massUploadFiles)
{
String task = "Get database metadata";
try
{
final List<MassUploadRecord> massUploadRecords = new ArrayList<MassUploadRecord>(massUploadFiles.length);
final DatabaseMetaData dbMetaData = getConnection().getMetaData();
try
{
for (File massUploadFile : massUploadFiles)
{
                    // Mass upload files are named '<order>=<table>.<extension>'; the
                    // part before '=' only determines the upload order.
                    final String[] splitName = StringUtils.split(massUploadFile.getName(), "=");
                    assert splitName.length == 2 : "Missing '=' in name of file '" + massUploadFile.getName() + "'.";
final String tableNameWithExtension = splitName[1];
boolean tsvFileType = TSV.isOfType(tableNameWithExtension);
assert tsvFileType : "Not a " + TSV.getFileType() + " file: " + massUploadFile.getName();
final String tableName = tableNameWithExtension.substring(0, tableNameWithExtension.lastIndexOf('.'));
final BitSet isBinaryColumn = findBinaryColumns(dbMetaData, tableName);
massUploadRecords.add(new MassUploadRecord(massUploadFile, tableName, isBinaryColumn));
}
} finally
{
task = "Close connection";
dbMetaData.getConnection().close();
}
for (MassUploadRecord record : massUploadRecords)
{
performMassUpload(record);
}
for (MassUploadRecord record : massUploadRecords)
{
fixSequence(record.tableName);
}
} catch (SQLException ex)
{
throw new UncategorizedSQLException(task, "UNKNOWN", ex);
}
}
private void performMassUpload(final MassUploadRecord record)
{
try
{
if (operationLog.isInfoEnabled())
{
operationLog.info("Perform mass upload of file '" + record.massUploadFile + "' to table '"
+ record.tableName + "'.");
}
final List<String[]> rows = readTSVFile(record.massUploadFile);
final int numberOfRows = rows.size();
final int numberOfColumns = (numberOfRows > 0) ? rows.get(0).length : 0;
final StringBuilder insertSql = new StringBuilder();
insertSql.append("insert into ");
insertSql.append(record.tableName);
insertSql.append(" values (");
for (int i = 0; i < numberOfColumns; i++)
{
insertSql.append("?,");
}
insertSql.setLength(insertSql.length() - 1);
insertSql.append(')');
getJdbcTemplate().batchUpdate(insertSql.toString(), new BatchPreparedStatementSetter()
{
public int getBatchSize()
{
return numberOfRows;
}
public void setValues(PreparedStatement ps, int rowNo) throws SQLException
{
for (int colNo = 0; colNo < numberOfColumns; ++colNo)
{
ps.setObject(colNo + 1, tryGetValue(rowNo, colNo));
}
}
private Object tryGetValue(int rowNo, int colNo)
{
final String stringValueOrNull = rows.get(rowNo)[colNo];
if (stringValueOrNull == null)
{
return null;
}
if (record.isBinaryColumn.get(colNo))
{
return stringValueOrNull.getBytes();
} else
{
return stringValueOrNull;
}
}
});
} catch (Exception ex)
{
throw CheckedExceptionTunnel.wrapIfNecessary(ex);
}
}
private BitSet findBinaryColumns(final DatabaseMetaData dbMetaData, final String tableName) throws SQLException
{
final BitSet binary = new BitSet();
        // H2 stores unquoted identifiers in upper case. In the getColumns() result
        // set, column 5 is DATA_TYPE, a java.sql.Types code.
        final ResultSet rs = dbMetaData.getColumns(null, null, tableName.toUpperCase(), "%");
        int columnNo = 0;
        while (rs.next())
        {
            final int typeCode = rs.getInt(5);
binary.set(columnNo, typeCode == Types.BINARY || typeCode == Types.VARBINARY);
++columnNo;
}
rs.close();
return binary;
}
private List<String[]> readTSVFile(File tsvFile) throws IOException
{
final List<String[]> result = new ArrayList<String[]>();
final BufferedReader reader = new BufferedReader(new InputStreamReader(FileUtils.openInputStream(tsvFile)));
try
{
String line = reader.readLine();
int numberOfColumns = -1;
while (line != null)
{
if (line.trim().length() > 0)
{
final String[] cols = StringUtils.splitPreserveAllTokens(line, '\t');
if (numberOfColumns < 0)
{
numberOfColumns = cols.length;
}
if (numberOfColumns != cols.length)
{
throw new IllegalArgumentException("line '" + line + "', cols found: " + cols.length
+ ", cols expected: " + numberOfColumns);
}
                    for (int i = 0; i < cols.length; ++i)
                    {
                        // Undo the escaping used in the dump files: restore escaped
                        // tabs and newlines, and turn '\N' into a database NULL.
                        cols[i] = StringUtils.replace(cols[i], "\\\\011", "\t");
                        cols[i] = StringUtils.replace(cols[i], "\\\\012", "\n");
                        if ("\\N".equals(cols[i]))
                        {
                            cols[i] = null;
                        }
                    }
result.add(cols);
}
line = reader.readLine();
}
} finally
{
IOUtils.closeQuietly(reader);
}
return result;
}
    private void fixSequence(String tableName)
    {
        // After a mass upload the table's ID sequence is still at its initial value;
        // restart it just above the highest ID that was uploaded.
        final String sequenceName = sequenceNameMapper.getSequencerForTable(tableName);
        if (sequenceName == null)
        {
            return;
        }
try
{
final long maxId = getSimpleJdbcTemplate().queryForLong(String.format("select max(id) from %s", tableName));
final long newSequenceValue = maxId + 1;
operationLog.info("Updating sequence " + sequenceName + " for table " + tableName + " to value "
+ newSequenceValue);
getJdbcTemplate().execute(
String.format("alter sequence %s restart with %d", sequenceName, newSequenceValue));
} catch (DataAccessException ex)
{
operationLog.error("Failed to set new value for sequence '" + sequenceName + "' of table '" + tableName
+ "'.", ex);
}
}
}
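
The uploader relies on two conventions visible above: file names of the form <order>=<table>.tsv (enforced by the assertions in performMassUpload) and dump-style escaping that readTSVFile undoes. A hypothetical call; the file name is made up:

// H2MassUploader's constructor declares SQLException; error handling omitted here.
final IMassUploader uploader = new H2MassUploader(dataSource, sequenceNameMapper);
// '005' only fixes the upload order; the rows land in table 'persons'.
uploader.performMassUpload(new File[] { new File("dump/005=persons.tsv") });
// Afterwards fixSequence() restarts the sequence mapped to 'persons' at
// max(id) + 1, so IDs generated later do not collide with the uploaded rows.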