diff --git a/rtd_yeastx/etc/log.xml b/rtd_yeastx/etc/log.xml new file mode 100644 index 0000000000000000000000000000000000000000..5cee0a68436a19a6c4ec4b14b580e6d9f75bf84f --- /dev/null +++ b/rtd_yeastx/etc/log.xml @@ -0,0 +1,19 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'> + + <appender name="STDOUT" class="org.apache.log4j.ConsoleAppender"> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/> + </layout> + </appender> + + <appender name="NULL" class="org.apache.log4j.varia.NullAppender" /> + + <root> + <priority value ="info" /> + <appender-ref ref="STDOUT" /> + </root> + +</log4j:configuration> diff --git a/rtd_yeastx/etc/service.properties b/rtd_yeastx/etc/service.properties new file mode 100644 index 0000000000000000000000000000000000000000..07d265ca3bd11707815ff4641f340faea5a530bc --- /dev/null +++ b/rtd_yeastx/etc/service.properties @@ -0,0 +1,308 @@ +# Unique code of this Data Store Server. Not more than 40 characters. +data-store-server-code = DSS1 + +root = targets/yeastx + +# The root directory of the data store +storeroot-dir = ${root}/store + +# The directory where the command queue file is located; defaults to storeroot-dir +commandqueue-dir = ${root} + +# Port +port = 8444 + +# Session timeout in minutes +session-timeout = 30 + +# Path to the keystore +keystore.path = dist/etc/openBIS.keystore + +use-ssl = false + +# Password of the keystore +keystore.password = changeit + +# Key password of the keystore +keystore.key-password = changeit + +# The check interval (in seconds) +check-interval = 2 + +# The time-out for clean up work in the shutdown sequence (in seconds). +# Note that the maximal time for the shutdown sequence to complete can be as large +# as twice this time. +shutdown-timeout = 2 + +# If free disk space goes below the value defined here, a notification email will be sent. +# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is +# specified or if value is negative, the system will not be watching. +highwater-mark = 1048576 + +# The URL of the openBIS server +server-url = http://localhost:8888/openbis + +# The username to use when contacting the openBIS server +username = etlserver_yeastx + +# The password to use when contacting the openBIS server +password = doesnotmatter + +# The base URL for Web client access. +download-url = http://localhost:8444 + +# SMTP properties (must start with 'mail' to be considered).
+mail.smtp.host = file://${root} +mail.from = datastore_server@localhost +mail.smtp.user = +mail.smtp.password = + +yeastx-databaseEngineCode = postgresql +yeastx-basicDatabaseName = metabol +yeastx-databaseKind = dev +yeastx-readOnlyGroup = metabol_readonly +yeastx-readWriteGroup = metabol_readwrite +yeastx-scriptFolder = sql + +# --------------------------------------------------------------------------- + +#maintenance-plugins=ds-remover + +ds-remover.class = ch.systemsx.cisd.yeastx.etl.MetabolDatabaseUpdater +ds-remover.interval = 10 +ds-remover.database.databaseEngineCode = ${yeastx-databaseEngineCode} +ds-remover.database.basicDatabaseName = ${yeastx-basicDatabaseName} +ds-remover.database.databaseKind = ${yeastx-databaseKind} + +# --------------------------------------------------------------------------- + +# list of additional web servlets which will be exposed +plugin-services = chromatogram-image-download-servlet +# class of the web servlet +chromatogram-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.yeastx.server.EICMLChromatogramGeneratorServlet +# URL which will be mapped to this servlet +chromatogram-image-download-servlet.path = /datastore_server/chromatogram +chromatogram-image-download-servlet.database.databaseEngineCode = ${yeastx-databaseEngineCode} +chromatogram-image-download-servlet.database.basicDatabaseName = ${yeastx-basicDatabaseName} +chromatogram-image-download-servlet.database.databaseKind = ${yeastx-databaseKind} + + +# ------------------------ + +# Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name. +reporting-plugins = eicml-chromatograms-reporter, eicml-runs-reporter, eicml-chromatogram-images-reporter + +# Label of the plugin which will be shown for the users. +eicml-chromatograms-reporter.label = Show eicML chromatograms +# Comma separated list of dataset type codes which can be handled by this plugin. +eicml-chromatograms-reporter.dataset-types = EICML +# Plugin class specification (together with the list of packages this class belongs to). +eicml-chromatograms-reporter.class = ch.systemsx.cisd.yeastx.eicml.EICMLChromatogramsReporter +# The property file. Its content will be passed as a parameter to the plugin. +eicml-chromatograms-reporter.properties-file = +eicml-chromatograms-reporter.database.databaseEngineCode = ${yeastx-databaseEngineCode} +eicml-chromatograms-reporter.database.basicDatabaseName = ${yeastx-basicDatabaseName} +eicml-chromatograms-reporter.database.databaseKind = ${yeastx-databaseKind} +eicml-chromatograms-reporter.database.readOnlyGroup = ${yeastx-readOnlyGroup} +eicml-chromatograms-reporter.database.readWriteGroup = ${yeastx-readWriteGroup} +eicml-chromatograms-reporter.database.scriptFolder = ${yeastx-scriptFolder} + +# Label of the plugin which will be shown for the users. +eicml-runs-reporter.label = Show eicML runs +# Comma separated list of dataset type codes which can be handled by this plugin. +eicml-runs-reporter.dataset-types = EICML +# Plugin class specification (together with the list of packages this class belongs to). +eicml-runs-reporter.class = ch.systemsx.cisd.yeastx.eicml.EICMLRunsReporter +# The property file. Its content will be passed as a parameter to the plugin. 
+eicml-runs-reporter.properties-file = +eicml-runs-reporter.database.databaseEngineCode = ${yeastx-databaseEngineCode} +eicml-runs-reporter.database.basicDatabaseName = ${yeastx-basicDatabaseName} +eicml-runs-reporter.database.databaseKind = ${yeastx-databaseKind} + +# Label of the plugin which will be shown for the users. +eicml-chromatogram-images-reporter.label = Show eicML chromatogram images +# Comma separated list of dataset type codes which can be handled by this plugin. +eicml-chromatogram-images-reporter.dataset-types = EICML +# Plugin class specification (together with the list of packages this class belongs to). +eicml-chromatogram-images-reporter.class = ch.systemsx.cisd.yeastx.eicml.EICMLChromatogramImagesReporter +# The property file. Its content will be passed as a parameter to the plugin. +eicml-chromatogram-images-reporter.properties-file = +eicml-chromatogram-images-reporter.database.databaseEngineCode = ${yeastx-databaseEngineCode} +eicml-chromatogram-images-reporter.database.basicDatabaseName = ${yeastx-basicDatabaseName} +eicml-chromatogram-images-reporter.database.databaseKind = ${yeastx-databaseKind} +eicml-chromatogram-images-reporter.database.readOnlyGroup = ${yeastx-readOnlyGroup} +eicml-chromatogram-images-reporter.database.readWriteGroup = ${yeastx-readWriteGroup} +eicml-chromatogram-images-reporter.database.scriptFolder = ${yeastx-scriptFolder} + +# ------------------------ + +# The period of no write access that needs to pass before an incoming data item is considered +# complete and ready to be processed (in seconds) [default: 300]. +# Valid only when the auto-detection method is used to determine if incoming data are ready to be processed. +quiet-period = 3 + +# ------------------------ + +inputs=main-thread, eicml-uploader, fiaml-uploader, quantml-uploader + +# Globally used separator character which separates entities in a data set file name +data-set-file-name-entity-separator = . + +# --------------------------------------------------------------------------- +# 'main-thread' thread configuration +# --------------------------------------------------------------------------- + +# The directory to watch for incoming data. +main-thread.incoming-dir = ${root}/incoming + +# Determines when the incoming data should be considered complete and ready to be processed. +# Allowed values: +# - auto-detection - when no write access will be detected for a specified 'quiet-period' +# - marker-file - when an appropriate marker file for the data exists. +# The default value is 'marker-file'.
+main-thread.incoming-data-completeness-condition = auto-detection +main-thread.incoming-dir.format = +main-thread.reprocess-faulty-datasets = true + +sample-name-property-code = samplename +experiment-name-property-code = name +file-name-property-code = file_name + +gain-write-access-script = ../datastore_server_yeastx/takeCifsOwnershipRecursive.sh + +main-thread.dataset-handler = ch.systemsx.cisd.yeastx.etl.BatchDataSetHandler +main-thread.dataset-handler.preprocessing-script = ${gain-write-access-script} +main-thread.dataset-handler.preprocessing-script-max-retries = 4 +main-thread.dataset-handler.preprocessing-script-failure-interval = 3 +main-thread.dataset-handler.unique-sample-name-property-code = ${sample-name-property-code} +main-thread.dataset-handler.unique-experiment-name-property-code = ${experiment-name-property-code} + +# The extractor class to use for code extraction +main-thread.data-set-info-extractor = ch.systemsx.cisd.yeastx.etl.BatchDataSetInfoExtractor +main-thread.data-set-info-extractor.file-name-property-code = ${file-name-property-code} +main-thread.data-set-info-extractor.unique-sample-name-property-code = ${sample-name-property-code} +main-thread.data-set-info-extractor.unique-experiment-name-property-code = ${experiment-name-property-code} + +# The extractor class to use for type extraction +main-thread.type-extractor = ch.systemsx.cisd.yeastx.etl.TypeExtractorYeastX +# comma separated list of pairs: file-extension file-type +# It is assumed that for each file extension a dataset type with the same name is defined in openBIS. +# The corresponding file types have to be defined in openBIS as well. +# Files with unspecified extensions will have the file type and dataset type UNKNOWN in openBIS. +main-thread.type-extractor.file-types = pdf pdf, mat matlab, zip archive, eicml xml, fiaml xml, mzxml xml + +# The storage processor (IStorageProcessor implementation) +main-thread.storage-processor = ch.systemsx.cisd.yeastx.etl.StorageProcessorWithDropboxes +main-thread.storage-processor.eicml-dropbox-dir = ${root}/dropbox-eicml +main-thread.storage-processor.fiaml-dropbox-dir = ${root}/dropbox-fiaml +main-thread.storage-processor.entity-separator = . +main-thread.storage-processor.processor = ch.systemsx.cisd.yeastx.etl.StorageProcessorWithMLUploader +main-thread.storage-processor.processor.unique-sample-name-property-code = ${sample-name-property-code} +main-thread.storage-processor.processor.unique-experiment-name-property-code = ${experiment-name-property-code} +main-thread.storage-processor.processor.database.databaseEngineCode = ${yeastx-databaseEngineCode} +main-thread.storage-processor.processor.database.basicDatabaseName = ${yeastx-basicDatabaseName} +main-thread.storage-processor.processor.database.databaseKind = ${yeastx-databaseKind} +main-thread.storage-processor.processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor + +# --------------------------------------------------------------------------- +# plugin properties for the thread which uploads eicML files +# --------------------------------------------------------------------------- + +# The directory to watch for incoming data. 
+eicml-uploader.incoming-dir = ${root}/incoming-eicml +eicml-uploader.incoming-data-completeness-condition = auto-detection +eicml-uploader.incoming-dir.format = +eicml-uploader.reprocess-faulty-datasets = false + +# The extractor class to use for code extraction +eicml-uploader.data-set-info-extractor = ch.systemsx.cisd.yeastx.etl.DataSetInfoExtractorYeastX +eicml-uploader.data-set-info-extractor.file-name-property-code = ${file-name-property-code} +eicml-uploader.data-set-info-extractor.index-of-experiment-identifier = 0 +eicml-uploader.data-set-info-extractor.index-of-parent-data-set-codes = 1 +eicml-uploader.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator} + +# The extractor class to use for type extraction +eicml-uploader.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +eicml-uploader.type-extractor.file-format-type = XML +eicml-uploader.type-extractor.locator-type = RELATIVE_LOCATION +eicml-uploader.type-extractor.data-set-type = EICML +eicml-uploader.type-extractor.is-measured = false + +# The storage processor which uploads the content of the files to the additional database +eicml-uploader.storage-processor = ch.systemsx.cisd.yeastx.etl.StorageProcessorWithMLUploader +eicml-uploader.storage-processor.unique-sample-name-property-code = ${sample-name-property-code} +eicml-uploader.storage-processor.unique-experiment-name-property-code = ${experiment-name-property-code} +eicml-uploader.storage-processor.database.databaseEngineCode = ${yeastx-databaseEngineCode} +eicml-uploader.storage-processor.database.basicDatabaseName = ${yeastx-basicDatabaseName} +eicml-uploader.storage-processor.database.databaseKind = ${yeastx-databaseKind} +eicml-uploader.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor + +# ---------------- plugin properties for the thread which uploads fiaML files + +# The directory to watch for incoming data. 
+fiaml-uploader.incoming-dir = ${root}/incoming-fiaml +fiaml-uploader.incoming-data-completeness-condition = auto-detection +fiaml-uploader.incoming-dir.format = +fiaml-uploader.reprocess-faulty-datasets = false + +# The extractor class to use for code extraction +fiaml-uploader.data-set-info-extractor = ch.systemsx.cisd.yeastx.etl.DataSetInfoExtractorYeastX +fiaml-uploader.data-set-info-extractor.file-name-property-code = ${file-name-property-code} +fiaml-uploader.data-set-info-extractor.index-of-experiment-identifier = 0 +fiaml-uploader.data-set-info-extractor.index-of-parent-data-set-codes = 1 +fiaml-uploader.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator} + +# The extractor class to use for type extraction +fiaml-uploader.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +fiaml-uploader.type-extractor.file-format-type = XML +fiaml-uploader.type-extractor.locator-type = RELATIVE_LOCATION +fiaml-uploader.type-extractor.data-set-type = FIAML +fiaml-uploader.type-extractor.is-measured = false + +# The storage processor which uploads the content of the files to the additional database +fiaml-uploader.storage-processor = ch.systemsx.cisd.yeastx.etl.StorageProcessorWithMLUploader +fiaml-uploader.storage-processor.unique-sample-name-property-code = ${sample-name-property-code} +fiaml-uploader.storage-processor.unique-experiment-name-property-code = ${experiment-name-property-code} +fiaml-uploader.storage-processor.database.databaseEngineCode = ${yeastx-databaseEngineCode} +fiaml-uploader.storage-processor.database.basicDatabaseName = ${yeastx-basicDatabaseName} +fiaml-uploader.storage-processor.database.databaseKind = ${yeastx-databaseKind} +fiaml-uploader.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor + +# ---------------- plugin properties for the thread which uploads quantML files + +# The directory to watch for incoming data. +quantml-uploader.incoming-dir = ${root}/incoming-quantml +quantml-uploader.incoming-data-completeness-condition = auto-detection +quantml-uploader.incoming-dir.format = +quantml-uploader.reprocess-faulty-datasets = false + +# The extractor class to use for code extraction +quantml-uploader.data-set-info-extractor = ch.systemsx.cisd.yeastx.etl.QuantMLDataSetInfoExtractor +quantml-uploader.data-set-info-extractor.index-of-experiment-identifier = 0 +quantml-uploader.data-set-info-extractor.sub-entity-separator = & +# unused but required to be different than sub-entity-separator +quantml-uploader.data-set-info-extractor.entity-separator = . 
+ +# The extractor class to use for type extraction +quantml-uploader.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +quantml-uploader.type-extractor.file-format-type = XML +quantml-uploader.type-extractor.locator-type = RELATIVE_LOCATION +quantml-uploader.type-extractor.data-set-type = UNKNOWN +quantml-uploader.type-extractor.is-measured = false + +# The storage processor which uploads the content of the files to the additional database +quantml-uploader.storage-processor = ch.systemsx.cisd.yeastx.etl.QuantMLStorageProcessor +quantml-uploader.storage-processor.preprocessing-script = ${gain-write-access-script} +quantml-uploader.storage-processor.unique-sample-name-property-code = ${sample-name-property-code} +quantml-uploader.storage-processor.unique-experiment-name-property-code = ${experiment-name-property-code} +quantml-uploader.storage-processor.database.databaseEngineCode = ${yeastx-databaseEngineCode} +quantml-uploader.storage-processor.database.basicDatabaseName = ${yeastx-basicDatabaseName} +quantml-uploader.storage-processor.database.databaseKind = ${yeastx-databaseKind} +quantml-uploader.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor + +archiver.class = ch.systemsx.cisd.yeastx.etl.MLArchiverTask +archiver.database.databaseEngineCode = ${yeastx-databaseEngineCode} +archiver.database.basicDatabaseName = ${yeastx-basicDatabaseName} +archiver.database.databaseKind = ${yeastx-databaseKind} +archiver.unique-sample-name-property-code = ${sample-name-property-code} +archiver.unique-experiment-name-property-code = ${experiment-name-property-code} diff --git a/rtd_yeastx/resource/eclipse/yeastx Data Store Server.launch b/rtd_yeastx/resource/eclipse/yeastx Data Store Server.launch new file mode 100644 index 0000000000000000000000000000000000000000..e4c351b6f44411b59fb0f3860575bace59aa3d1a --- /dev/null +++ b/rtd_yeastx/resource/eclipse/yeastx Data Store Server.launch @@ -0,0 +1,13 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<launchConfiguration type="org.eclipse.jdt.launching.localJavaApplication"> +<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS"> +<listEntry value="/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/DataStoreServer.java"/> +</listAttribute> +<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES"> +<listEntry value="1"/> +</listAttribute> +<booleanAttribute key="org.eclipse.debug.core.appendEnvironmentVariables" value="true"/> +<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="ch.systemsx.cisd.openbis.dss.generic.DataStoreServer"/> +<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="rtd_yeastx"/> +<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-ea"/> +</launchConfiguration> diff --git a/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/db/IGenericDAO.java b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/db/IGenericDAO.java index a6258f0df9e89d63ccb4dc757ddcef3df499d18a..8e03104e266f530995c855217f7c98a7e08a54ad 100644 --- a/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/db/IGenericDAO.java +++ b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/db/IGenericDAO.java @@ -62,4 +62,7 @@ public interface IGenericDAO extends TransactionQuery @Update(sql = "delete from DATA_SETS where PERM_ID=?{1.permId}", batchUpdate = true) public void deleteDataSets(List<DMDataSetDTO> dataSets); + @Update(sql = "delete from DATA_SETS where PERM_ID=?{1}") + public void deleteDataSet(String permId); + } diff --git 
a/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/ML2DatabaseUploader.java b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/ML2DatabaseUploader.java index ae61eb7f28249933b6fe0d4a495eda5910da1495..a5f81b33371c385c6bfb4c37f3dba6ae50487d8a 100644 --- a/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/ML2DatabaseUploader.java +++ b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/ML2DatabaseUploader.java @@ -84,6 +84,21 @@ public class ML2DatabaseUploader implements IDataSetUploader /** uploads files with recognized extensions to the additional database */ public void upload(File dataSet, DataSetInformation dataSetInformation) throws EnvironmentFailureException + { + uploadData(dataSet, dataSetInformation.tryToGetSample(), dataSetInformation + .tryToGetExperiment(), dataSetInformation.getDataSetCode(), dataSetInformation); + } + + /** + * Uploads files with recognized extensions to the additional database. + */ + public void upload(File dataSet, Sample sampleOrNull, Experiment experiment, String dataSetCode) + { + uploadData(dataSet, sampleOrNull, experiment, dataSetCode, null); + } + + private void uploadData(File dataSet, Sample sample, Experiment experiment, String dataSetCode, + DataSetInformation dataSetInformationOrNull) { try { @@ -92,10 +107,12 @@ public class ML2DatabaseUploader implements IDataSetUploader throw new IllegalStateException( "Current transaction has been neither commited nor rollbacked."); } - this.currentTransaction = tryGetDatasetUploader(dataSet, dataSetInformation); + this.currentTransaction = + tryGetDatasetUploader(dataSet, isMZXMLWithNoConversion(dataSet, + dataSetInformationOrNull)); if (currentTransaction != null) { - DMDataSetDTO openbisBacklink = createBacklink(dataSetInformation); + DMDataSetDTO openbisBacklink = createBacklink(dataSetCode, sample, experiment); currentTransaction.upload(dataSet, openbisBacklink); } } catch (SQLException e) @@ -104,12 +121,11 @@ public class ML2DatabaseUploader implements IDataSetUploader .fromTemplate( e, "A database error occured while extracting additional information from '%s' file content for '%s' dataset.", - dataSet.getPath(), dataSetInformation.getDataSetCode()); + dataSet.getPath(), dataSetCode); } } - private IDatasetLoader tryGetDatasetUploader(File dataSet, DataSetInformation dataSetInformation) - throws SQLException + private IDatasetLoader tryGetDatasetUploader(File dataSet, boolean isMZXML) throws SQLException { String extension = getExtension(dataSet); if (extension.equalsIgnoreCase(ConstantsYeastX.FIAML_EXT)) @@ -121,17 +137,23 @@ public class ML2DatabaseUploader implements IDataSetUploader } else if (extension.equalsIgnoreCase(ConstantsYeastX.QUANTML_EXT)) { return quantML2Database; - } else if (extension.equalsIgnoreCase(ConstantsYeastX.MZXML_EXT)) + } else if (isMZXML) { - DataSetInformationYeastX info = (DataSetInformationYeastX) dataSetInformation; - if (info.getConversion() == MLConversionType.NONE) - { - return mzXml2Database; - } + return mzXml2Database; } return null; } + private static boolean isMZXMLWithNoConversion(File dataSet, + DataSetInformation dataSetInformationOrNull) + { + boolean isMZXMLExtension = + getExtension(dataSet).equalsIgnoreCase(ConstantsYeastX.MZXML_EXT); + boolean dataSetUndefined = dataSetInformationOrNull == null; + return (isMZXMLExtension && (dataSetUndefined || ((dataSetInformationOrNull instanceof DataSetInformationYeastX && ((DataSetInformationYeastX) dataSetInformationOrNull) + .getConversion() == MLConversionType.NONE)))); + } + public void commit() { try 
@@ -160,10 +182,8 @@ public class ML2DatabaseUploader implements IDataSetUploader } } - private DMDataSetDTO createBacklink(DataSetInformation dataSetInformation) + private DMDataSetDTO createBacklink(String datasetPermId, Sample sample, Experiment experiment) { - String datasetPermId = dataSetInformation.getDataSetCode(); - Sample sample = dataSetInformation.tryToGetSample(); String sampleName = UNKNOWN_NAME; String sampPermIdOrNull = null; if (sample != null) @@ -171,12 +191,10 @@ public class ML2DatabaseUploader implements IDataSetUploader sampleName = findSampleName(sample.getProperties()); sampPermIdOrNull = sample.getPermId(); } - Experiment experiment = dataSetInformation.tryToGetExperiment(); if (experiment == null) { throw new EnvironmentFailureException( - "No information about the experiment connected to a dataset " - + dataSetInformation); + "No information about the experiment connected to a dataset " + datasetPermId); } String experimentName = findExperimentName(experiment.getProperties()); diff --git a/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/MLArchiverTask.java b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/MLArchiverTask.java new file mode 100644 index 0000000000000000000000000000000000000000..2489c04fde25c74514edf3733103c59ff3417860 --- /dev/null +++ b/rtd_yeastx/source/java/ch/systemsx/cisd/yeastx/etl/MLArchiverTask.java @@ -0,0 +1,132 @@ +/* + * Copyright 2010 ETH Zuerich, CISD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package ch.systemsx.cisd.yeastx.etl; + +import java.io.File; +import java.util.Properties; + +import javax.sql.DataSource; + +import net.lemnik.eodsql.QueryTool; + +import ch.systemsx.cisd.common.exceptions.UserFailureException; +import ch.systemsx.cisd.dbmigration.DatabaseConfigurationContext; +import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.AbstractArchiverProcessingPlugin; +import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider; +import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Experiment; +import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample; +import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription; +import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier; +import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier; +import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier; +import ch.systemsx.cisd.yeastx.db.DBUtils; +import ch.systemsx.cisd.yeastx.db.IGenericDAO; + +/** + * Archiver that removes/adds data related to given data set from/to metabol database. + * + * @author Izabela Adamczyk + */ +public class MLArchiverTask extends AbstractArchiverProcessingPlugin +{ + + private static final long serialVersionUID = 1L; + + public MLArchiverTask(Properties properties, File storeRoot) + { + super(properties, storeRoot, null, null); + } + + /** + * Deletes data related to given data set from metabol database. 
+ */ + @Override + protected void archive(DatasetDescription dataset) throws UserFailureException + { + final IGenericDAO dao = createQuery(properties); + try + { + dao.deleteDataSet(dataset.getDatasetCode()); + dao.commit(); + } catch (Exception ex) + { + dao.rollback(); + throw new UserFailureException(ex.getMessage()); + } finally + { + dao.close(); + } + } + + private static IGenericDAO createQuery(Properties properties) + { + final DatabaseConfigurationContext dbContext = DBUtils.createAndInitDBContext(properties); + DataSource dataSource = dbContext.getDataSource(); + return QueryTool.getQuery(dataSource, IGenericDAO.class); + } + + /** + * Adds data related to given data set to metabol database. + */ + @Override + protected void unarchive(DatasetDescription dataset) throws UserFailureException + { + try + { + Sample sample = null; + if (dataset.getSampleCode() != null) + { + SampleIdentifier sampleIdentifier = + new SampleIdentifier(new SpaceIdentifier(dataset.getDatabaseInstanceCode(), + dataset.getGroupCode()), dataset.getSampleCode()); + sample = + ServiceProvider.getOpenBISService().tryGetSampleWithExperiment( + sampleIdentifier); + } + ExperimentIdentifier experimentIdentifier = + new ExperimentIdentifier(dataset.getDatabaseInstanceCode(), dataset + .getGroupCode(), dataset.getProjectCode(), dataset.getExperimentCode()); + Experiment experiment = + ServiceProvider.getOpenBISService().tryToGetExperiment(experimentIdentifier); + ML2DatabaseUploader databaseUploader = new ML2DatabaseUploader(properties); + databaseUploader.upload(getDataFile(dataset), sample, experiment, dataset + .getDatasetCode()); + databaseUploader.commit(); + } catch (Exception ex) + { + throw new UserFailureException(ex.getMessage()); + } + } + + private File getDataFile(DatasetDescription dataset) + { + File datasetDir = getDataSubDir(dataset); + File[] files = datasetDir.listFiles(); + if (files.length < 1) + { + throw new UserFailureException( + "Data set directory contains no files (exactly one expected)"); + } else if (files.length > 1) + { + throw new UserFailureException( + "Data set directory contains more than one file (exactly one expected)"); + } else + { + return files[0]; + } + } +}
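A minimal usage sketch (not part of the diff) of the single-record delete added to IGenericDAO, mirroring the transaction handling that MLArchiverTask.archive uses above; the DataSource is assumed to be already configured for the metabol database, and the class name and perm ID below are hypothetical examples.

import javax.sql.DataSource;

import net.lemnik.eodsql.QueryTool;

import ch.systemsx.cisd.yeastx.db.IGenericDAO;

public class DeleteDataSetSketch
{
    /** Removes the DATA_SETS row for the given data set perm ID from the metabol database. */
    public static void deleteByPermId(DataSource metabolDataSource, String dataSetPermId)
    {
        // Obtain the EoD SQL query interface, as MLArchiverTask.createQuery does.
        final IGenericDAO dao = QueryTool.getQuery(metabolDataSource, IGenericDAO.class);
        try
        {
            dao.deleteDataSet(dataSetPermId); // single perm-ID variant of deleteDataSets
            dao.commit();
        } catch (RuntimeException ex)
        {
            dao.rollback();
            throw ex;
        } finally
        {
            dao.close();
        }
    }
}

The commit-on-success, rollback-on-failure, close-in-finally pattern is the same one MLArchiverTask.archive follows when the archiver removes a data set's rows from the metabol database.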