diff --git a/dbmigration/.classpath b/dbmigration/.classpath
index b5854f63a0653f2b1c038b5f2e81899d9bcfbf2c..328eeb31858bb1d4ce392320ee9102dbcbcacc1c 100644
--- a/dbmigration/.classpath
+++ b/dbmigration/.classpath
@@ -18,5 +18,6 @@
 	<classpathentry kind="lib" path="/libraries/postgresql/postgresql.jar" sourcepath="/libraries/postgresql/src.zip"/>
 	<classpathentry kind="lib" path="/libraries/commons-io/commons-io.jar" sourcepath="/libraries/commons-io/src.zip"/>
 	<classpathentry kind="lib" path="/libraries/commons-logging/commons-logging.jar" sourcepath="/libraries/commons-logging/src.zip"/>
+	<classpathentry kind="lib" path="/libraries/h2/h2.jar" sourcepath="/libraries/h2/h2-src.zip"/>
 	<classpathentry kind="output" path="targets/classes"/>
 </classpath>
diff --git a/dbmigration/source/java/ch/systemsx/cisd/dbmigration/DatabaseEngine.java b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/DatabaseEngine.java
index f844682ce4b790bf853304d8c4200b69f148631f..323b5e3fc05bf107218b5efe8570ce6e7b3431c9 100644
--- a/dbmigration/source/java/ch/systemsx/cisd/dbmigration/DatabaseEngine.java
+++ b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/DatabaseEngine.java
@@ -28,6 +28,7 @@ import ch.systemsx.cisd.common.db.ISequencerHandler;
 import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
 import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
 import ch.systemsx.cisd.common.db.PostgreSQLSequencerHandler;
+import ch.systemsx.cisd.dbmigration.h2.H2DAOFactory;
 import ch.systemsx.cisd.dbmigration.postgresql.PostgreSQLDAOFactory;
 
 /**
@@ -39,11 +40,11 @@ public enum DatabaseEngine
 {
     POSTGRESQL("postgresql", org.postgresql.Driver.class, PostgreSQLDAOFactory.class, new DefaultLobHandler(),
             new PostgreSQLSequencerHandler(), "jdbc:postgresql:{0}", "jdbc:postgresql:{0}{1}", "//localhost/",
-            "postgres");
+            "postgres"),
 
-//    H2("h2", org.h2.Driver.class, H2DAOFactory.class, new DefaultLobHandler(),
-//            new PostgreSQLSequencerHandler(), "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1",
-//            "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1", "mem:", "");
+    H2("h2", org.h2.Driver.class, H2DAOFactory.class, new DefaultLobHandler(),
+            new PostgreSQLSequencerHandler(), "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1",
+            "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1", "mem:", "");
 
     private static Map<String, DatabaseEngine> engines = initEngineMap();
diff --git a/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2AdminDAO.java b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2AdminDAO.java
new file mode 100644
index 0000000000000000000000000000000000000000..e66d3d0657b8e1e7ed7cb223320aaace93b164bb
--- /dev/null
+++ b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2AdminDAO.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.dbmigration.h2;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.sql.SQLException;
+import java.util.Arrays;
+
+import javax.sql.DataSource;
+
+import org.apache.log4j.Logger;
+import org.h2.tools.DeleteDbFiles;
+import org.springframework.jdbc.core.simple.SimpleJdbcDaoSupport;
+
+import ch.systemsx.cisd.common.Script;
+import ch.systemsx.cisd.common.db.ISqlScriptExecutor;
+import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.utilities.FileUtilities;
+import ch.systemsx.cisd.dbmigration.DatabaseVersionLogDAO;
+import ch.systemsx.cisd.dbmigration.IDatabaseAdminDAO;
+import ch.systemsx.cisd.dbmigration.IMassUploader;
+import ch.systemsx.cisd.dbmigration.MassUploadFileType;
+
+/**
+ * Implementation of {@link IDatabaseAdminDAO} for H2.
+ *
+ * @author Bernd Rinn
+ */
+public class H2AdminDAO extends SimpleJdbcDaoSupport implements IDatabaseAdminDAO
+{
+    private static final String DROP_ALL_OBJECTS_SQL = "drop all objects;";
+
+    private static final String SQL_FILE_TYPE = ".sql";
+
+    private static final String CREATE_TABLE_DATABASE_VERSION_LOGS_SQL =
+            "create table " + DatabaseVersionLogDAO.DB_VERSION_LOG + " (db_version varchar(4) not null, "
+                    + "module_name varchar(250), run_status varchar(10), run_status_timestamp timestamp, "
+                    + "module_code bytea, run_exception bytea);";
+
+    private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION, H2AdminDAO.class);
+
+    private final String databaseName;
+
+    private final ISqlScriptExecutor scriptExecutor;
+
+    private final IMassUploader massUploader;
+
+    /**
+     * Creates an instance.
+     *
+     * @param dataSource Data source able to create/drop the specified database.
+     * @param scriptExecutor An executor of SQL scripts within the new database.
+     * @param massUploader A class that can perform mass (batch) uploads into database tables.
+     * @param databaseName Name of the database.
+     */
+    public H2AdminDAO(DataSource dataSource, ISqlScriptExecutor scriptExecutor, IMassUploader massUploader,
+            String databaseName)
+    {
+        this.scriptExecutor = scriptExecutor;
+        this.massUploader = massUploader;
+        this.databaseName = databaseName;
+        setDataSource(dataSource);
+    }
+
+    public String getDatabaseName()
+    {
+        return databaseName;
+    }
+
+    public void createOwner()
+    {
+        // Creation of the user happens "on the fly" with H2.
+    }
+
+    public void createDatabase()
+    {
+        // Creation of databases happens "on the fly" with H2; we only need to create the
+        // database_version_logs table.
+        createDatabaseVersionLogsTable();
+    }
+
+    private void createDatabaseVersionLogsTable()
+    {
+        try
+        {
+            scriptExecutor.execute(new Script("create database_version_logs table",
+                    CREATE_TABLE_DATABASE_VERSION_LOGS_SQL), true, null);
+        } catch (RuntimeException ex)
+        {
+            operationLog.error("Failed to create database_version_logs table.", ex);
+            throw ex;
+        }
+    }
+
+    public void dropDatabase()
+    {
+        scriptExecutor.execute(new Script("drop database", DROP_ALL_OBJECTS_SQL), true, null);
+        try
+        {
+            DeleteDbFiles.execute("targets", databaseName, true);
+        } catch (SQLException ex)
+        {
+            throw new CheckedExceptionTunnel(ex);
+        }
+    }
+
+    public void restoreDatabaseFromDump(File dumpFolder, String version)
+    {
+        createDatabaseVersionLogsTable();
+        final Script schemaScript = loadScript(dumpFolder, "schema", version);
+        scriptExecutor.execute(schemaScript, true, null);
+        final File[] massUploadFiles = getMassUploadFiles(dumpFolder);
+        massUploader.performMassUpload(massUploadFiles);
+        final Script finishScript = loadScript(dumpFolder, "finish", version);
+        scriptExecutor.execute(finishScript, true, null);
+    }
+
+    private Script loadScript(final File dumpFolder, String prefix, String version)
+            throws ConfigurationFailureException
+    {
+        final File scriptFile = new File(dumpFolder, prefix + "-" + version + SQL_FILE_TYPE);
+        if (scriptFile.exists() == false)
+        {
+            final String message = "No " + prefix + " script found for version " + version;
+            operationLog.error(message);
+            throw new ConfigurationFailureException(message);
+        }
+        return new Script(scriptFile.getPath(), FileUtilities.loadToString(scriptFile), version);
+    }
+
+    /**
+     * Returns the files determined for mass uploading.
+     */
+    private File[] getMassUploadFiles(File dumpFolder)
+    {
+        if (operationLog.isDebugEnabled())
+        {
+            operationLog.debug("Searching for mass upload files in directory '" + dumpFolder.getAbsolutePath()
+                    + "'.");
+        }
+        // Accepts both CSV and TSV files, hence the neutral variable name.
+        final String[] massUploadFileNames = dumpFolder.list(new FilenameFilter()
+            {
+                public boolean accept(File dir, String name)
+                {
+                    return MassUploadFileType.CSV.isOfType(name) || MassUploadFileType.TSV.isOfType(name);
+                }
+            });
+        if (massUploadFileNames == null)
+        {
+            operationLog.warn("Path '" + dumpFolder.getAbsolutePath() + "' is not a directory.");
+            return new File[0];
+        }
+        Arrays.sort(massUploadFileNames);
+        if (operationLog.isInfoEnabled())
+        {
+            operationLog.info("Found " + massUploadFileNames.length + " files for mass uploading.");
+        }
+        final File[] massUploadPaths = new File[massUploadFileNames.length];
+        for (int i = 0; i < massUploadFileNames.length; ++i)
+        {
+            massUploadPaths[i] = new File(dumpFolder, massUploadFileNames[i]);
+        }
+        return massUploadPaths;
+    }
+
+}
diff --git a/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2DAOFactory.java b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2DAOFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..6c7560d84319081286b5dc1e72c5876aa4c7e800
--- /dev/null
+++ b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2DAOFactory.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.dbmigration.h2;
+
+import java.sql.SQLException;
+
+import javax.sql.DataSource;
+
+import ch.systemsx.cisd.common.db.ISqlScriptExecutor;
+import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.dbmigration.DatabaseConfigurationContext;
+import ch.systemsx.cisd.dbmigration.DatabaseVersionLogDAO;
+import ch.systemsx.cisd.dbmigration.IDAOFactory;
+import ch.systemsx.cisd.dbmigration.IDatabaseAdminDAO;
+import ch.systemsx.cisd.dbmigration.IDatabaseVersionLogDAO;
+import ch.systemsx.cisd.dbmigration.IMassUploader;
+import ch.systemsx.cisd.dbmigration.SqlScriptExecutor;
+
+/**
+ * Implementation of {@link IDAOFactory} for H2.
+ *
+ * @author Bernd Rinn
+ */
+public class H2DAOFactory implements IDAOFactory
+{
+    private final IDatabaseAdminDAO databaseDAO;
+
+    private final ISqlScriptExecutor sqlScriptExecutor;
+
+    private final IDatabaseVersionLogDAO databaseVersionLogDAO;
+
+    private final IMassUploader massUploader;
+
+    /**
+     * Creates an instance based on the specified configuration context.
+     */
+    public H2DAOFactory(DatabaseConfigurationContext context)
+    {
+        final DataSource dataSource = context.getDataSource();
+        sqlScriptExecutor = new SqlScriptExecutor(dataSource, context.isScriptSingleStepMode());
+        databaseVersionLogDAO = new DatabaseVersionLogDAO(dataSource, context.getLobHandler());
+        try
+        {
+            massUploader = new H2MassUploader(dataSource, context.getSequenceNameMapper());
+        } catch (SQLException ex)
+        {
+            throw new CheckedExceptionTunnel(ex);
+        }
+        databaseDAO =
+                new H2AdminDAO(context.getAdminDataSource(), sqlScriptExecutor, massUploader,
+                        context.getDatabaseName());
+    }
+
+    public IDatabaseAdminDAO getDatabaseDAO()
+    {
+        return databaseDAO;
+    }
+
+    public ISqlScriptExecutor getSqlScriptExecutor()
+    {
+        return sqlScriptExecutor;
+    }
+
+    public IDatabaseVersionLogDAO getDatabaseVersionLogDAO()
+    {
+        return databaseVersionLogDAO;
+    }
+
+    public IMassUploader getMassUploader()
+    {
+        return massUploader;
+    }
+
+}
diff --git a/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2MassUploader.java b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2MassUploader.java
new file mode 100644
index 0000000000000000000000000000000000000000..e00ff10953a19a4d0536d845c1bfd724f909ca11
--- /dev/null
+++ b/dbmigration/source/java/ch/systemsx/cisd/dbmigration/h2/H2MassUploader.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.dbmigration.h2;
+
+import static ch.systemsx.cisd.dbmigration.MassUploadFileType.TSV;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.List;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.UncategorizedSQLException;
+import org.springframework.jdbc.core.BatchPreparedStatementSetter;
+import org.springframework.jdbc.core.simple.SimpleJdbcDaoSupport;
+
+import ch.systemsx.cisd.common.db.ISequenceNameMapper;
+import ch.systemsx.cisd.common.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.dbmigration.IMassUploader;
+
+/**
+ * An {@link IMassUploader} for the H2 database.
+ *
+ * @author Bernd Rinn
+ */
+public class H2MassUploader extends SimpleJdbcDaoSupport implements IMassUploader
+{
+    private static final Logger operationLog =
+            LogFactory.getLogger(LogCategory.OPERATION, H2MassUploader.class);
+
+    private final ISequenceNameMapper sequenceNameMapper;
+
+    /**
+     * Creates an instance for the specified data source and sequence name mapper.
+     */
+    public H2MassUploader(DataSource dataSource, ISequenceNameMapper sequenceNameMapper) throws SQLException
+    {
+        this.sequenceNameMapper = sequenceNameMapper;
+        setDataSource(dataSource);
+    }
+
+    private final class MassUploadRecord
+    {
+        final File massUploadFile;
+
+        final String tableName;
+
+        final BitSet isBinaryColumn;
+
+        MassUploadRecord(File massUploadFile, String tableName, BitSet isBinaryColumn)
+        {
+            this.massUploadFile = massUploadFile;
+            this.tableName = tableName;
+            this.isBinaryColumn = isBinaryColumn;
+        }
+    }
+
+    public void performMassUpload(File[] massUploadFiles)
+    {
+        String task = "Get database metadata";
+        try
+        {
+            final List<MassUploadRecord> massUploadRecords =
+                    new ArrayList<MassUploadRecord>(massUploadFiles.length);
+            final DatabaseMetaData dbMetaData = getConnection().getMetaData();
+            try
+            {
+                for (File massUploadFile : massUploadFiles)
+                {
+                    final String[] splitName = StringUtils.split(massUploadFile.getName(), "=");
+                    assert splitName.length == 2 : "Missing '=' in name of file '" + massUploadFile.getName()
+                            + "'.";
+                    final String tableNameWithExtension = splitName[1];
+                    final boolean tsvFileType = TSV.isOfType(tableNameWithExtension);
+                    assert tsvFileType : "Not a " + TSV.getFileType() + " file: " + massUploadFile.getName();
+                    final String tableName =
+                            tableNameWithExtension.substring(0, tableNameWithExtension.lastIndexOf('.'));
+                    final BitSet isBinaryColumn = findBinaryColumns(dbMetaData, tableName);
+                    massUploadRecords.add(new MassUploadRecord(massUploadFile, tableName, isBinaryColumn));
+                }
+            } finally
+            {
+                task = "Close connection";
+                dbMetaData.getConnection().close();
+            }
+            for (MassUploadRecord record : massUploadRecords)
+            {
+                performMassUpload(record);
+            }
+            for (MassUploadRecord record : massUploadRecords)
+            {
+                fixSequence(record.tableName);
+            }
+        } catch (SQLException ex)
+        {
+            throw new UncategorizedSQLException(task, "UNKNOWN", ex);
+        }
+    }
+
+    private void performMassUpload(final MassUploadRecord record)
+    {
+        try
+        {
+            if (operationLog.isInfoEnabled())
+            {
+                operationLog.info("Perform mass upload of file '" + record.massUploadFile + "' to table '"
+                        + record.tableName + "'.");
+            }
+            final List<String[]> rows = readTSVFile(record.massUploadFile);
+            final int numberOfRows = rows.size();
+            if (numberOfRows == 0)
+            {
+                return; // Nothing to upload; also avoids building an invalid INSERT statement below.
+            }
+            final int numberOfColumns = rows.get(0).length;
+            final StringBuilder insertSql = new StringBuilder();
+            insertSql.append("insert into ");
+            insertSql.append(record.tableName);
+            insertSql.append(" values (");
+            for (int i = 0; i < numberOfColumns; i++)
+            {
+                insertSql.append("?,");
+            }
+            insertSql.setLength(insertSql.length() - 1);
+            insertSql.append(')');
+            getJdbcTemplate().batchUpdate(insertSql.toString(), new BatchPreparedStatementSetter()
+                {
+                    public int getBatchSize()
+                    {
+                        return numberOfRows;
+                    }
+
+                    public void setValues(PreparedStatement ps, int rowNo) throws SQLException
+                    {
+                        for (int colNo = 0; colNo < numberOfColumns; ++colNo)
+                        {
+                            ps.setObject(colNo + 1, tryGetValue(rowNo, colNo));
+                        }
+                    }
+
+                    private Object tryGetValue(int rowNo, int colNo)
+                    {
+                        final String stringValueOrNull = rows.get(rowNo)[colNo];
+                        if (stringValueOrNull == null)
+                        {
+                            return null;
+                        }
+                        if (record.isBinaryColumn.get(colNo))
+                        {
+                            return stringValueOrNull.getBytes();
+                        } else
+                        {
+                            return stringValueOrNull;
+                        }
+                    }
+                });
+        } catch (Exception ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        }
+    }
+
+    private BitSet findBinaryColumns(final DatabaseMetaData dbMetaData, final String tableName) throws SQLException
+    {
+        final BitSet binary = new BitSet();
+        final ResultSet rs = dbMetaData.getColumns(null, null, tableName.toUpperCase(), "%");
+        try
+        {
+            int columnNo = 0;
+            while (rs.next())
+            {
+                // Column 5 of the getColumns() result set is DATA_TYPE (a java.sql.Types code).
+                final int typeCode = rs.getInt(5);
+                binary.set(columnNo, typeCode == Types.BINARY || typeCode == Types.VARBINARY);
+                ++columnNo;
+            }
+        } finally
+        {
+            rs.close();
+        }
+        return binary;
+    }
+
+    private List<String[]> readTSVFile(File tsvFile) throws IOException
+    {
+        final List<String[]> result = new ArrayList<String[]>();
+        final BufferedReader reader =
+                new BufferedReader(new InputStreamReader(FileUtils.openInputStream(tsvFile)));
+        try
+        {
+            String line = reader.readLine();
+            int numberOfColumns = -1;
+            while (line != null)
+            {
+                if (line.trim().length() > 0)
+                {
+                    final String[] cols = StringUtils.splitPreserveAllTokens(line, '\t');
+                    if (numberOfColumns < 0)
+                    {
+                        numberOfColumns = cols.length;
+                    }
+                    if (numberOfColumns != cols.length)
+                    {
+                        throw new IllegalArgumentException("line '" + line + "', cols found: " + cols.length
+                                + ", cols expected: " + numberOfColumns);
+                    }
+                    for (int i = 0; i < cols.length; ++i)
+                    {
+                        cols[i] = StringUtils.replace(cols[i], "\\\\011", "\t");
+                        cols[i] = StringUtils.replace(cols[i], "\\\\012", "\n");
+                        if ("\\N".equals(cols[i]))
+                        {
+                            cols[i] = null;
+                        }
+                    }
+                    result.add(cols);
+                }
+                line = reader.readLine();
+            }
+        } finally
+        {
+            IOUtils.closeQuietly(reader);
+        }
+        return result;
+    }
+
+    private void fixSequence(String tableName)
+    {
+        final String sequenceName = sequenceNameMapper.getSequencerForTable(tableName);
+        if (sequenceName == null)
+        {
+            return;
+        }
+        try
+        {
+            final long maxId =
+                    getSimpleJdbcTemplate().queryForLong(String.format("select max(id) from %s", tableName));
+            final long newSequenceValue = maxId + 1;
+            operationLog.info("Updating sequence " + sequenceName + " for table " + tableName + " to value "
+                    + newSequenceValue);
+            getJdbcTemplate().execute(
+                    String.format("alter sequence %s restart with %d", sequenceName, newSequenceValue));
+        } catch (DataAccessException ex)
+        {
+            operationLog.error("Failed to set new value for sequence '" + sequenceName + "' of table '" + tableName
+                    + "'.", ex);
+        }
+    }
+}
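
Reviewer note: the {0}/{1} placeholders in the two H2 URL templates suggest java.text.MessageFormat-style expansion (an assumption here, since the expansion code lives outside this diff), with "mem:" as the default host part and an empty default user. A minimal, self-contained sketch of the URL an in-memory database would get; the database name "testdb" is hypothetical, chosen only for illustration:

    import java.text.MessageFormat;

    public class H2UrlSketch
    {
        public static void main(String[] args)
        {
            // Template and default host part as declared on the H2 constant in DatabaseEngine.
            final String template = "jdbc:h2:{0}{1};DB_CLOSE_DELAY=-1";
            final String defaultHostPart = "mem:"; // keeps the database in memory
            final String databaseName = "testdb"; // hypothetical name for illustration
            System.out.println(MessageFormat.format(template, defaultHostPart, databaseName));
            // Prints: jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1
        }
    }

The DB_CLOSE_DELAY=-1 setting matters for tests: without it, H2 discards an in-memory database as soon as the last connection to it closes.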
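
A second note on the mass-upload convention: H2MassUploader.performMassUpload() expects each dump file to be named "<key>=<table>.tsv", and since getMassUploadFiles() sorts the names, the key before '=' plausibly fixes the upload order (an assumption; the code only asserts that the '=' is present). A small sketch of the table-name extraction, using a hypothetical file name:

    import org.apache.commons.lang.StringUtils;

    public class MassUploadFileNameSketch
    {
        public static void main(String[] args)
        {
            // Hypothetical file name; "049" would control upload order via the sort.
            final String fileName = "049=database_version_logs.tsv";
            final String[] splitName = StringUtils.split(fileName, "=");
            final String tableNameWithExtension = splitName[1];
            final String tableName =
                    tableNameWithExtension.substring(0, tableNameWithExtension.lastIndexOf('.'));
            System.out.println(tableName); // prints: database_version_logs
        }
    }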