From e661698442f2f8411c52100f2c7ab57ed076b50c Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 19:23:53 -0700 Subject: [PATCH 1/7] Consolidate Dataclass data update methods - use DIB for update only --- .../labkey/api/exp/api/ExperimentService.java | 3 +- .../labkey/api/exp/query/ExpDataTable.java | 173 +- .../api/query/AbstractQueryUpdateService.java | 3117 +++++++++-------- .../api/query/DefaultQueryUpdateService.java | 1877 +++++----- .../test/integration/DataClassCrud.ispec.ts | 156 +- .../labkey/experiment/ExpDataIterators.java | 75 +- .../labkey/experiment/ExperimentModule.java | 2346 ++++++------- .../experiment/ExperimentUpgradeCode.java | 111 + .../experiment/api/DataClassDomainKind.java | 6 +- .../api/ExpDataClassDataTableImpl.java | 320 +- .../labkey/experiment/api/ExpDataImpl.java | 1963 +++++------ .../api/SampleTypeUpdateServiceDI.java | 104 +- .../samples/AbstractExpFolderImporter.java | 1 - 13 files changed, 5113 insertions(+), 5139 deletions(-) diff --git a/api/src/org/labkey/api/exp/api/ExperimentService.java b/api/src/org/labkey/api/exp/api/ExperimentService.java index be904efff7c..2575d10582e 100644 --- a/api/src/org/labkey/api/exp/api/ExperimentService.java +++ b/api/src/org/labkey/api/exp/api/ExperimentService.java @@ -130,6 +130,8 @@ public interface ExperimentService extends ExperimentRunTypeSource String EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS = "org.labkey.api.exp.api.ExperimentService#FROM_EXPANCESTORS"; + String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; + int SIMPLE_PROTOCOL_FIRST_STEP_SEQUENCE = 1; int SIMPLE_PROTOCOL_CORE_STEP_SEQUENCE = 10; int SIMPLE_PROTOCOL_EXTRA_STEP_SEQUENCE = 15; @@ -149,7 +151,6 @@ static void setInstance(ExperimentService impl) enum QueryOptions { - UseLsidForUpdate, GetSampleRecomputeCol, SkipBulkRemapCache, DeferRequiredLineageValidation, diff --git a/api/src/org/labkey/api/exp/query/ExpDataTable.java 
b/api/src/org/labkey/api/exp/query/ExpDataTable.java index e8ab68a2abb..99f42cb5b27 100644 --- a/api/src/org/labkey/api/exp/query/ExpDataTable.java +++ b/api/src/org/labkey/api/exp/query/ExpDataTable.java @@ -1,84 +1,89 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.api.exp.query; - -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.query.FieldKey; - -public interface ExpDataTable extends ExpTable -{ - enum Column - { - RowId, - LSID, - Name, - Description, - DataClass, - Protocol, - SourceProtocolApplication, - SourceApplicationInput, - DataFileUrl, - ReferenceCount, - Run, - RunApplication, - RunApplicationOutput, - Created, - CreatedBy, - Modified, - ModifiedBy, - Folder, - Flag, - Alias, - DownloadLink, - ContentLink, - ViewFileLink, - Thumbnail, - InlineThumbnail, - FileSize, - FileExists, - FileExtension, - ViewOrDownload, - WebDavUrl, - WebDavUrlRelative, - Generated, - LastIndexed, - Inputs, - Outputs, - Properties; - - public FieldKey fieldKey() - { - return FieldKey.fromParts(name()); - } - } - - void setExperiment(ExpExperiment experiment); - ExpExperiment getExperiment(); - void setRun(ExpRun run); - ExpRun getRun(); - - void setDataType(DataType type); - DataType 
getDataType(); - - MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); - MutableColumnInfo addDataInputColumn(String alias, String role); - MutableColumnInfo addInputRunCountColumn(String alias); -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.api.exp.query; + +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.query.FieldKey; + +public interface ExpDataTable extends ExpTable +{ + enum Column + { + Alias, + ContentLink, + ClassId, // database table only + CpasType, // database table only + Created, + CreatedBy, + DataClass, + DataFileUrl, + Description, + DownloadLink, + FileExtension, + FileExists, + FileSize, + Flag, + Folder, + Generated, + InlineThumbnail, + Inputs, + LastIndexed, + LSID, + Modified, + ModifiedBy, + Name, + ObjectId, // database table only + Outputs, + Properties, + Protocol, + ReferenceCount, + Run, + RunApplication, + RunApplicationOutput, + RunId, // database table only + RowId, + SourceApplicationId, // database table only + SourceApplicationInput, + SourceProtocolApplication, + Thumbnail, + ViewFileLink, + ViewOrDownload, + WebDavUrl, + WebDavUrlRelative; + + public FieldKey fieldKey() + { + 
return FieldKey.fromParts(name()); + } + } + + void setExperiment(ExpExperiment experiment); + ExpExperiment getExperiment(); + void setRun(ExpRun run); + ExpRun getRun(); + + void setDataType(DataType type); + DataType getDataType(); + + MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); + MutableColumnInfo addDataInputColumn(String alias, String role); + MutableColumnInfo addInputRunCountColumn(String alias); +} diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 6aaa7f8e4bb..4972b62bd17 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -1,1557 +1,1560 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.labkey.api.assay.AssayFileWriter; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.attachments.AttachmentParentFactory; -import org.labkey.api.attachments.SpringAttachmentFile; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.audit.provider.FileSystemAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.Sets; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DbSequenceManager; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.MultiValuedForeignKey; -import org.labkey.api.data.PropertyStorageSpec; -import org.labkey.api.data.RuntimeSQLException; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.dataiterator.AttachmentDataIterator; -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import 
org.labkey.api.dataiterator.DetailedAuditLogDataIterator; -import org.labkey.api.dataiterator.ExistingRecordDataIterator; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.dataiterator.Pump; -import org.labkey.api.dataiterator.StandardDataIteratorBuilder; -import org.labkey.api.dataiterator.TriggerDataBuilderHelper; -import org.labkey.api.dataiterator.WrapperDataIterator; -import org.labkey.api.exceptions.OptimisticConflictException; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.list.ListDefinition; -import org.labkey.api.exp.list.ListService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.files.FileContentService; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.reader.TabLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.AdminPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.test.TestWhen; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JunitUtil; -import org.labkey.api.util.TestContext; -import org.labkey.api.util.URIUtil; -import org.labkey.api.view.NotFoundException; -import org.labkey.api.view.UnauthorizedException; -import 
org.labkey.api.writer.VirtualFile; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.File; -import java.io.IOException; -import java.io.StringReader; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.function.Function; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; -import static org.labkey.api.files.FileContentService.UPLOADED_FILE; -import static org.labkey.api.util.FileUtil.toFileForRead; -import static org.labkey.api.util.FileUtil.toFileForWrite; - -public abstract class AbstractQueryUpdateService implements QueryUpdateService -{ - protected final TableInfo _queryTable; - - private boolean _bulkLoad = false; - private CaseInsensitiveHashMap _columnImportMap = null; - private VirtualFile _att = null; - - /* AbstractQueryUpdateService is generally responsible for some shared functionality - * - triggers - * - coercion/validation - * - detailed logging - * - attachments - * - * If a subclass wants to disable some of these features (w/o subclassing), put flags here... 
- */ - protected boolean _enableExistingRecordsDataIterator = true; - protected Set _previouslyUpdatedRows = new HashSet<>(); - - protected AbstractQueryUpdateService(TableInfo queryTable) - { - if (queryTable == null) - throw new IllegalArgumentException(); - _queryTable = queryTable; - } - - protected TableInfo getQueryTable() - { - return _queryTable; - } - - public @NotNull Set getPreviouslyUpdatedRows() - { - return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) - { - return getQueryTable().hasPermission(user, acl); - } - - protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - return getRow(user, container, keys); - } - - protected abstract Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException; - - @Override - public List> getRows(User user, Container container, List> keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - List> result = new ArrayList<>(); - for (Map rowKeys : keys) - { - Map row = getRow(user, container, rowKeys); - if (row != null) - result.add(row); - } - return result; - } - - @Override - public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - Map> result = new LinkedHashMap<>(); - for (Map.Entry> key : keys.entrySet()) - { - Map row = 
getRow(user, container, key.getValue(), verifyNoCrossFolderData); - if (row != null && !row.isEmpty()) - { - result.put(key.getKey(), row); - if (verifyNoCrossFolderData) - { - String dataContainer = (String) row.get("container"); - if (StringUtils.isEmpty(dataContainer)) - dataContainer = (String) row.get("folder"); - if (!container.getId().equals(dataContainer)) - throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); - } - } - else if (verifyExisting) - throw new InvalidKeyException("Data not found for " + key.getValue().values()); - } - return result; - } - - @Override - public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) - { - return false; - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) - { - return createTransactionAuditEvent(container, auditAction, null); - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) - { - long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); - TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); - if (details != null) - event.addDetails(details); - return event; - } - - public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) - { - UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); - - if (schema != null) - { - // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the - // pre-commit task is executed. 
Otherwise, since the creation of the table happens while within the commit for the - // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the - // table. - schema.getTable(auditEvent.getEventType(), false); - - transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); - - transaction.setAuditEvent(auditEvent); - } - } - - protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) - { - if (null == errors) - errors = new BatchValidationException(); - DataIteratorContext context = new DataIteratorContext(errors); - context.setInsertOption(forImport); - context.setConfigParameters(configParameters); - configureDataIteratorContext(context); - recordDataIteratorUsed(configParameters); - - return context; - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters == null) - return; - - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } - catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - - /** - * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. - * Used only for generating ExistingRecordDataIterator at the moment. - */ - protected Set getSelectKeys(DataIteratorContext context) - { - if (!context.getAlternateKeys().isEmpty()) - return context.getAlternateKeys(); - return null; - } - - /* - * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. - * does NOT handle triggers or the insert/update iterator. 
- */ - public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) - { - DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); - - if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) - { - // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) - dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); - } - - dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); - dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); - dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); - return dib; - } - - - /** - * Implementation to use insertRows() while we migrate to using DIB for all code paths - *

- * DataIterator should/must use the same error collection as passed in - */ - @Deprecated - protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) - { - MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); - List> list = new ArrayList<>(); - List> ret; - Exception rowException; - - try - { - while (mapIterator.next()) - list.add(mapIterator.getMap()); - ret = insertRows(user, container, list, errors, null, extraScriptContext); - if (errors.hasErrors()) - return 0; - return ret.size(); - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - return 0; - } - catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) - { - rowException = x; - } - finally - { - DataIteratorUtil.closeQuietly(mapIterator); - } - errors.addRowError(new ValidationException(rowException.getMessage())); - return 0; - } - - protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) - { - return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); - } - - protected boolean hasInsertRowsPermission(User user) - { - return hasPermission(user, InsertPermission.class); - } - - protected boolean hasDeleteRowsPermission(User user) - { - return hasPermission(user, DeletePermission.class); - } - - protected boolean hasUpdateRowsPermission(User user) - { - return hasPermission(user, UpdatePermission.class); - } - - // override this - protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) - { - } - - protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasImportRowsPermission(user, container, context)) - throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) - assert(getQueryTable().supportsInsertOption(context.getInsertOption())); - - context.getErrors().setExtraContext(extraScriptContext); - if (extraScriptContext != null) - { - context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); - } - - preImportDIBValidation(in, null); - - boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); - boolean hasTableScript = hasTableScript(container); - TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); - if (!skipTriggers) - { - in = preTriggerDataIterator(in, context); - if (hasTableScript) - in = helper.before(in); - } - DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); - DataIteratorBuilder out = importDIB; - - if 
(!skipTriggers) - { - if (hasTableScript) - out = helper.after(importDIB); - - out = postTriggerDataIterator(out, context); - } - - if (hasTableScript) - { - context.setFailFast(false); - context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); - } - int count = _pump(out, outputRows, context); - - if (context.getErrors().hasErrors()) - return 0; - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) - _addSummaryAuditEvent(container, user, context, count); - - return count; - } - - protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) - { - return in; - } - - protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) - { - return out; - } - - /** this is extracted so subclasses can add wrap */ - protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) - { - DataIterator it = etl.getDataIterator(context); - - try - { - if (null != rows) - { - MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); - it = new WrapperDataIterator(maps) - { - @Override - public boolean next() throws BatchValidationException - { - boolean ret = super.next(); - if (ret) - rows.add(((MapDataIterator)_delegate).getMap()); - return ret; - } - }; - } - - Pump pump = new Pump(it, context); - pump.run(); - - return pump.getRowCount(); - } - finally - { - DataIteratorUtil.closeQuietly(it); - } - } - - /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ - protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) - { - afterInsertUpdate(count, errors); - } - - protected void afterInsertUpdate(int count, BatchValidationException errors) - {} - - @Override - public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - return loadRows(user, container, 
rows, null, context, extraScriptContext); - } - - public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - configureDataIteratorContext(context); - int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - return count; - } - - @Override - public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) - { - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); - int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); - afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); - return count; - } - - @Override - public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - { - throw new UnsupportedOperationException("merge is not supported for all tables"); - } - - private boolean hasTableScript(Container container) - { - return getQueryTable().hasTriggers(container); - } - - - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); - } - - - protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, 
context, extraScriptContext); - } - - protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); - ArrayList> outputRows = new ArrayList<>(); - int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - - if (context.getErrors().hasErrors()) - return null; - - return outputRows; - } - - // not yet supported - protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - - protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) - { - // TODO probably can't assume all rows have all columns - // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) - // TODO optimize ArrayListMap? 
- Set colNames; - - if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) - { - colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); - } - else - { - // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet - colNames = Sets.newCaseInsensitiveHashSet(); - for (Map row : rows) - colNames.addAll(row.keySet()); - } - - preImportDIBValidation(null, colNames); - return MapDataIterator.of(colNames, rows, debugName); - } - - - /** @deprecated switch to using DIB based method */ - @Deprecated - protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) - throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - boolean hasTableScript = hasTableScript(container); - - errors.setExtraContext(extraScriptContext); - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - row = normalizeColumnNames(row); - try - { - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), false); - if (hasTableScript) - { - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); - } - row = insertRow(user, container, row); - if (row == null) - continue; - - if (hasTableScript) - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); - result.add(row); - } - catch (SQLException sqlx) - { - if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) - { - ValidationException vex = new ValidationException(sqlx.getMessage()); - vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); - errors.addRowError(vex); - } - else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) - { - // if we already have some errors, just break - break; - } - else - { - throw sqlx; - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - } - - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); - - return result; - } - - protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) - { - if (!isBulkLoad()) - { - AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; - String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); - getQueryTable().getAuditHandler(auditBehavior) - .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); - } - } - - private Map normalizeColumnNames(Map row) - { - if(_columnImportMap == null) - { - _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); - } - - Map newRow = new CaseInsensitiveHashMap<>(); - CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); - columns.addAll(row.keySet()); - - String newName; - for(String key : row.keySet()) - { - if(_columnImportMap.containsKey(key)) - { - //it is possible for a normalized name to conflict with an existing property. if so, defer to the original - newName = _columnImportMap.get(key).getName(); - if(!columns.contains(newName)){ - newRow.put(newName, row.get(key)); - continue; - } - } - newRow.put(key, row.get(key)); - } - - return newRow; - } - - @Override - public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws DuplicateKeyException, QueryUpdateServiceException, SQLException - { - try - { - List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); - afterInsertUpdate(null==ret?0:ret.size(), errors); - if (errors.hasErrors()) - return null; - return ret; - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - } - return null; - } - - protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) - { - if (col != null && value != null && - !col.getJavaObjectClass().isInstance(value) && - !(value instanceof AttachmentFile) && - !(value instanceof MultipartFile) && - !(value instanceof String[]) && - !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) - { - try - { - if 
(col.getKindOfQuantity() != null) - providedValues.put(key, value); - if (PropertyType.FILE_LINK.equals(col.getPropertyType())) - value = ExpDataFileConverter.convert(value); - else - value = col.convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw e; - } - catch (ConversionException e) - { - // That's OK, the transformation script may be able to fix up the value before it gets inserted - } - } - - return value; - } - - /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ - @Deprecated - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); - result.put(entry.getKey(), value); - } - - return result; - } - - protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - - protected boolean firstUpdateRow = true; - Function,Map> updateTransform = Function.identity(); - - /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ - final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - if (firstUpdateRow) - { - firstUpdateRow = false; - if (null != OntologyService.get()) - { - var t = OntologyService.get().getConceptUpdateHandler(_queryTable); - if (null != t) - updateTransform = t; - } - } - row = updateTransform.apply(row); - return updateRow(user, container, row, 
oldRow, configParameters); - } - - // used by updateRows to check if all rows have the same set of keys - // prepared statement can only be used to updateRows if all rows have the same set of keys - protected static boolean hasUniformKeys(List> rowsToUpdate) - { - if (rowsToUpdate == null || rowsToUpdate.isEmpty()) - return false; - - if (rowsToUpdate.size() == 1) - return true; - - Set keys = rowsToUpdate.get(0).keySet(); - int keySize = keys.size(); - - for (int i = 1 ; i < rowsToUpdate.size(); i ++) - { - Set otherKeys = rowsToUpdate.get(i).keySet(); - if (otherKeys.size() != keySize) - return false; - if (!otherKeys.containsAll(keys)) - return false; - } - - return true; - } - - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, - BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - if (oldKeys != null && rows.size() != oldKeys.size()) - throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); - - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> oldRows = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), true); - try - { - Map oldKey = oldKeys == null ? row : oldKeys.get(i); - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, oldKey); - if (oldRow == null) - throw new NotFoundException("The existing row was not found."); - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); - Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); - if (!streaming) - { - result.add(updatedRow); - oldRows.add(oldRow); - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (OptimisticConflictException e) - { - errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); - afterInsertUpdate(null==result?0:result.size(), errors, true); - - if (errors.hasErrors()) - throw errors; - - addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); - - return result; - } - - protected void checkDuplicateUpdate(Object pkVals) throws ValidationException - { - if (pkVals == null) - return; - - Set updatedRows = getPreviouslyUpdatedRows(); - - Object[] keysObj; - if (pkVals.getClass().isArray()) - keysObj = (Object[]) pkVals; - else if (pkVals instanceof Map map) - { - List orderedKeyVals = new ArrayList<>(); - SortedSet sortedKeys = new TreeSet<>(map.keySet()); - for (String key : sortedKeys) - orderedKeyVals.add(map.get(key)); - keysObj = orderedKeyVals.toArray(); - } - else - keysObj = new Object[]{pkVals}; - - if (keysObj.length == 1) - { - if (updatedRows.contains(keysObj[0])) - throw new ValidationException("Duplicate key provided: " + keysObj[0]); - updatedRows.add(keysObj[0]); - return; - } - - List keys = new ArrayList<>(); - for (Object key : keysObj) - keys.add(String.valueOf(key)); - if (updatedRows.contains(keys)) - throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); - updatedRows.add(keys); - } - - @Override - public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Move is not supported for this table type."); - } - - protected abstract Map deleteRow(User user, Container container, Map oldRow) - throws InvalidKeyException, ValidationException, 
QueryUpdateServiceException, SQLException; - - protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return deleteRow(user, container, oldRow); - } - - @Override - public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to delete data from this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); - - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - List> result = new ArrayList<>(keys.size()); - for (int i = 0; i < keys.size(); i++) - { - Map key = keys.get(i); - try - { - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, key); - // if row doesn't exist, bail early - if (oldRow == null) - continue; - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); - Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); - result.add(updatedRow); - } - catch 
(InvalidKeyException ex) - { - ValidationException vex = new ValidationException(ex.getMessage()); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); - - return result; - } - - protected int truncateRows(User user, Container container) - throws QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException(); - } - - @Override - public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to truncate this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); - - int result = truncateRows(user, container); - - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); - addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); - - return result; - 
} - - @Override - public void setBulkLoad(boolean bulkLoad) - { - _bulkLoad = bulkLoad; - } - - @Override - public boolean isBulkLoad() - { - return _bulkLoad; - } - - public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException - { - FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); - return saveFile(user, container, name, value, dirPath); - } - - /** - * Save uploaded file to dirName directory under file or pipeline root. - */ - public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException - { - if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) - throw new ValidationException("Invalid file value"); - - String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; - FileLike file = null; - try - { - FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); - - FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); - if (value instanceof MultipartFile multipartFile) - { - // Once we've found one, write it to disk and replace the row's value with just the File reference to it - if (multipartFile.isEmpty()) - { - throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); - } - file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); - checkFileUnderRoot(container, file); - multipartFile.transferTo(toFileForWrite(file)); - event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); - event.setProvidedFileName(multipartFile.getOriginalFilename()); - } - else - { - SpringAttachmentFile saf = (SpringAttachmentFile) value; - file = 
FileUtil.findUniqueFileName(saf.getFilename(), dir); - checkFileUnderRoot(container, file); - saf.saveTo(file); - event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); - event.setProvidedFileName(saf.getFilename()); - } - event.setFile(file.getName()); - event.setFieldName(name); - event.setDirectory(file.getParent().toURI().getPath()); - AuditLogService.get().addEvent(user, event); - } - catch (IOException | ExperimentException e) - { - throw new QueryUpdateServiceException(e); - } - - ensureExpData(user, container, file.toNioPathForRead().toFile()); - return file; - } - - public static ExpData ensureExpData(User user, Container container, File file) - { - ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); - // create exp.data record - if (existingData == null) - { - File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); - ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); - data.setName(file.getName()); - data.setDataFileURI(canonicalFile.toPath().toUri()); - if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) - { - // If the path is too long to store, bail out without creating an exp.data row - data.save(user); - } - - return data; - } - - return existingData; - } - - // For security reasons, make sure the user hasn't tried to reference a file that's not under - // the pipeline root or @assayfiles root. 
Otherwise, they could get access to any file on the server - static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException - { - Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); - if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) - return file; - - PipeRoot root = PipelineService.get().findPipelineRoot(container); - if (root == null) - throw new ExperimentException("Pipeline root not available in container " + container.getPath()); - - if (!root.isUnderRoot(toFileForRead(file))) - { - throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); - } - - return file; - } - - protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) - { - if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level - { - AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); - String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); - boolean skipAuditLevelCheck = false; - if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) - { - if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad - skipAuditLevelCheck = true; - } - getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); - } - } - - /** - * Is used by the AttachmentDataIterator to point to the location of the serialized - * attachment files. 
- */ - public void setAttachmentDirectory(VirtualFile att) - { - _att = att; - } - - @Nullable - protected VirtualFile getAttachmentDirectory() - { - return _att; - } - - /** - * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory - * implementation in order to resolve the attachment parent on incoming attachment files. - */ - @Nullable - protected AttachmentParentFactory getAttachmentParentFactory() - { - return null; - } - - /** Translate between the column name that query is exposing to the column name that actually lives in the database */ - protected static void aliasColumns(Map columnMapping, Map row) - { - for (Map.Entry entry : columnMapping.entrySet()) - { - if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) - { - row.put(entry.getKey(), row.get(entry.getValue())); - } - } - } - - /** - * The database table has underscores for MV column names, but we expose a column without the underscore. - * Therefore, we need to translate between the two sets of column names. 
- * @return database column name -> exposed TableInfo column name - */ - protected static Map createMVMapping(Domain domain) - { - Map result = new CaseInsensitiveHashMap<>(); - if (domain != null) - { - for (DomainProperty domainProperty : domain.getProperties()) - { - if (domainProperty.isMvEnabled()) - { - result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); - } - } - } - return result; - } - - @TestWhen(TestWhen.When.BVT) - public static class TestCase extends Assert - { - private boolean _useAlias = false; - - static TabLoader getTestData() throws IOException - { - TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); - testData.parseAsCSV(); - testData.getColumns()[0].clazz = Integer.class; - testData.getColumns()[1].clazz = Integer.class; - testData.getColumns()[2].clazz = String.class; - return testData; - } - - @BeforeClass - public static void createList() throws Exception - { - if (null == ListService.get()) - return; - deleteList(); - - TabLoader testData = getTestData(); - String hash = GUID.makeHash(); - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - assertNotNull(lists); - - ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); - R.setKeyName("pk"); - Domain d = requireNonNull(R.getDomain()); - for (int i=0 ; i> getRows() - { - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); - return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); - } - - @Before - public void resetList() throws 
Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - qus.truncateRows(user, c, null, null); - } - - @AfterClass - public static void deleteList() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - Map m = s.getLists(c); - if (m.containsKey("R")) - m.get("R").delete(user); - } - - void validateDefaultData(List> rows) - { - assertEquals(3, rows.size()); - - assertEquals(0, rows.get(0).get("pk")); - assertEquals(1, rows.get(1).get("pk")); - assertEquals(2, rows.get(2).get("pk")); - - assertEquals(0, rows.get(0).get("i")); - assertEquals(1, rows.get(1).get("i")); - assertEquals(2, rows.get(2).get("i")); - - assertEquals("zero", rows.get(0).get("s")); - assertEquals("one", rows.get(1).get("s")); - assertEquals("two", rows.get(2).get("s")); - } - - @Test - public void INSERT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertFalse(errors.hasErrors()); - validateDefaultData(rows); - validateDefaultData(getRows()); - - qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void UPSERT() throws Exception - { - if (null == 
ListService.get()) - return; - /* not sure how you use/test ImportOptions.UPSERT - * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? - */ - } - - @Test - public void IMPORT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var count = qus.importRows(user, c, getTestData(), errors, null, null); - assertFalse(errors.hasErrors()); - assert(count == 3); - validateDefaultData(getRows()); - - qus.importRows(user, c, getTestData(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void MERGE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? 
"pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - BatchValidationException errors = new BatchValidationException() - { - @Override - public void addRowError(ValidationException vex) - { - LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); - fail(vex.getMessage()); - } - }; - int count=0; - try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) - { - var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - if (!errors.hasErrors()) - { - tx.commit(); - count = ret; - } - } - assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is not updated - assertEquals(2, rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - - // merge should fail if duplicate keys are provided - errors = new BatchValidationException(); - mergeRows = new ArrayList<>(); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - } - - @Test - public void UPDATE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var updateRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - - // update using data iterator - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(1, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO-UP", rows.get(2).get("s")); - // test existing row value is not updated/erased - assertEquals(2, rows.get(2).get("i")); - - // update should fail if a new record is provided - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - - // Issue 52728: update should fail if duplicate key is provide - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - - // use DIB - context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); - - // use updateRows - if (!_useAlias) // _update using alias is not supported - { - BatchValidationException errors 
= new BatchValidationException(); - try - { - qus.updateRows(user, c, updateRows, null, errors, null, null); - } - catch (Exception e) - { - - } - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - - } - } - - @Test - public void REPLACE() throws Exception - { - if (null == ListService.get()) - return; - assert(getRows().isEmpty()); - INSERT(); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.REPLACE); - var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is updated - assertNull(rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - } - - @Test - public void IMPORT_IDENTITY() - { - if (null == ListService.get()) - return; - // TODO - } - - @Test - public void ALIAS_MERGE() throws Exception - { - _useAlias = true; - MERGE(); - } - - @Test - public void ALIAS_REPLACE() throws Exception - { - _useAlias = true; - REPLACE(); - } - - @Test - public void ALIAS_UPDATE() throws Exception - { - _useAlias = true; - UPDATE(); - } - } -} +/* + * Copyright (c) 
2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.labkey.api.assay.AssayFileWriter; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.attachments.AttachmentParentFactory; +import org.labkey.api.attachments.SpringAttachmentFile; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.audit.provider.FileSystemAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DbSequenceManager; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.MultiValuedForeignKey; +import 
org.labkey.api.data.PropertyStorageSpec; +import org.labkey.api.data.RuntimeSQLException; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; +import org.labkey.api.dataiterator.ExistingRecordDataIterator; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.dataiterator.Pump; +import org.labkey.api.dataiterator.StandardDataIteratorBuilder; +import org.labkey.api.dataiterator.TriggerDataBuilderHelper; +import org.labkey.api.dataiterator.WrapperDataIterator; +import org.labkey.api.exceptions.OptimisticConflictException; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.list.ListDefinition; +import org.labkey.api.exp.list.ListService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.files.FileContentService; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.reader.TabLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.AdminPermission; +import org.labkey.api.security.permissions.DeletePermission; +import 
org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.test.TestWhen; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JunitUtil; +import org.labkey.api.util.TestContext; +import org.labkey.api.util.URIUtil; +import org.labkey.api.view.NotFoundException; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.io.StringReader; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.files.FileContentService.UPLOADED_FILE; +import static org.labkey.api.util.FileUtil.toFileForRead; +import static org.labkey.api.util.FileUtil.toFileForWrite; + +public abstract class AbstractQueryUpdateService implements QueryUpdateService +{ + protected final TableInfo _queryTable; + + private boolean _bulkLoad = false; + private CaseInsensitiveHashMap _columnImportMap = null; + private VirtualFile _att = null; + + /* AbstractQueryUpdateService is generally responsible for some shared 
functionality + * - triggers + * - coercion/validation + * - detailed logging + * - attachments + * + * If a subclass wants to disable some of these features (w/o subclassing), put flags here... + */ + protected boolean _enableExistingRecordsDataIterator = true; + protected Set _previouslyUpdatedRows = new HashSet<>(); + + protected AbstractQueryUpdateService(TableInfo queryTable) + { + if (queryTable == null) + throw new IllegalArgumentException(); + _queryTable = queryTable; + } + + protected TableInfo getQueryTable() + { + return _queryTable; + } + + public @NotNull Set getPreviouslyUpdatedRows() + { + return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) + { + return getQueryTable().hasPermission(user, acl); + } + + protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + return getRow(user, container, keys); + } + + protected abstract Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException; + + @Override + public List> getRows(User user, Container container, List> keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + List> result = new ArrayList<>(); + for (Map rowKeys : keys) + { + Map row = getRow(user, container, rowKeys); + if (row != null) + result.add(row); + } + return result; + } + + @Override + public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw 
new UnauthorizedException("You do not have permission to read data from this table."); + + Map> result = new LinkedHashMap<>(); + for (Map.Entry> key : keys.entrySet()) + { + Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); + if (row != null && !row.isEmpty()) + { + result.put(key.getKey(), row); + if (verifyNoCrossFolderData) + { + String dataContainer = (String) row.get("container"); + if (StringUtils.isEmpty(dataContainer)) + dataContainer = (String) row.get("folder"); + if (!container.getId().equals(dataContainer)) + throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); + } + } + else if (verifyExisting) + throw new InvalidKeyException("Data not found for " + key.getValue().values()); + } + return result; + } + + @Override + public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) + { + return false; + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) + { + return createTransactionAuditEvent(container, auditAction, null); + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) + { + long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); + TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); + if (details != null) + event.addDetails(details); + return event; + } + + public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) + { + UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); + + if (schema != null) + { + // This is a little hack to ensure that the audit table has actually 
been created and gets put into the table cache by the time the + // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the + // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the + // table. + schema.getTable(auditEvent.getEventType(), false); + + transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); + + transaction.setAuditEvent(auditEvent); + } + } + + protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) + { + if (null == errors) + errors = new BatchValidationException(); + DataIteratorContext context = new DataIteratorContext(errors); + context.setInsertOption(forImport); + context.setConfigParameters(configParameters); + configureDataIteratorContext(context); + recordDataIteratorUsed(configParameters); + + return context; + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters == null) + return; + + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } + catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + + /** + * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. + * Used only for generating ExistingRecordDataIterator at the moment. + */ + protected Set getSelectKeys(DataIteratorContext context) + { + if (!context.getAlternateKeys().isEmpty()) + return context.getAlternateKeys(); + return null; + } + + /* + * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. + * does NOT handle triggers or the insert/update iterator. 
+ */ + public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) + { + DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); + + if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) + { + // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) + dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); + } + + dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); + dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); + dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); + return dib; + } + + + /** + * Implementation to use insertRows() while we migrate to using DIB for all code paths + *

+ * DataIterator should/must use the same error collection as passed in + */ + @Deprecated + protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) + { + MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); + List> list = new ArrayList<>(); + List> ret; + Exception rowException; + + try + { + while (mapIterator.next()) + list.add(mapIterator.getMap()); + ret = insertRows(user, container, list, errors, null, extraScriptContext); + if (errors.hasErrors()) + return 0; + return ret.size(); + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + return 0; + } + catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) + { + rowException = x; + } + finally + { + DataIteratorUtil.closeQuietly(mapIterator); + } + errors.addRowError(new ValidationException(rowException.getMessage())); + return 0; + } + + protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) + { + return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); + } + + protected boolean hasInsertRowsPermission(User user) + { + return hasPermission(user, InsertPermission.class); + } + + protected boolean hasDeleteRowsPermission(User user) + { + return hasPermission(user, DeletePermission.class); + } + + protected boolean hasUpdateRowsPermission(User user) + { + return hasPermission(user, UpdatePermission.class); + } + + // override this + protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) + { + } + + protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasImportRowsPermission(user, container, context)) + throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) + assert(getQueryTable().supportsInsertOption(context.getInsertOption())); + + context.getErrors().setExtraContext(extraScriptContext); + if (extraScriptContext != null) + { + context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); + } + + preImportDIBValidation(in, null); + + boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); + boolean hasTableScript = hasTableScript(container); + TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); + if (!skipTriggers) + { + in = preTriggerDataIterator(in, context); + if (hasTableScript) + in = helper.before(in); + } + DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); + DataIteratorBuilder out = importDIB; + + if 
(!skipTriggers) + { + if (hasTableScript) + out = helper.after(importDIB); + + out = postTriggerDataIterator(out, context); + } + + if (hasTableScript) + { + context.setFailFast(false); + context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); + } + int count = _pump(out, outputRows, context); + + if (context.getErrors().hasErrors()) + return 0; + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) + _addSummaryAuditEvent(container, user, context, count); + + return count; + } + + protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) + { + return in; + } + + protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) + { + return out; + } + + /** this is extracted so subclasses can add wrap */ + protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) + { + DataIterator it = etl.getDataIterator(context); + + if (null == it) + return 0; + + try + { + if (null != rows) + { + MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); + it = new WrapperDataIterator(maps) + { + @Override + public boolean next() throws BatchValidationException + { + boolean ret = super.next(); + if (ret) + rows.add(((MapDataIterator)_delegate).getMap()); + return ret; + } + }; + } + + Pump pump = new Pump(it, context); + pump.run(); + + return pump.getRowCount(); + } + finally + { + DataIteratorUtil.closeQuietly(it); + } + } + + /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ + protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) + { + afterInsertUpdate(count, errors); + } + + protected void afterInsertUpdate(int count, BatchValidationException errors) + {} + + @Override + public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + 
return loadRows(user, container, rows, null, context, extraScriptContext); + } + + public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + configureDataIteratorContext(context); + int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + return count; + } + + @Override + public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) + { + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); + int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); + afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); + return count; + } + + @Override + public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + { + throw new UnsupportedOperationException("merge is not supported for all tables"); + } + + private boolean hasTableScript(Container container) + { + return getQueryTable().hasTriggers(container); + } + + + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); + } + + + protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); + ArrayList> outputRows = new ArrayList<>(); + int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + + if (context.getErrors().hasErrors()) + return null; + + return outputRows; + } + + // not yet supported + protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + + protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) + { + // TODO probably can't assume all rows have all columns + // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) + // TODO optimize ArrayListMap? 
+ Set colNames; + + if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) + { + colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); + } + else + { + // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet + colNames = Sets.newCaseInsensitiveHashSet(); + for (Map row : rows) + colNames.addAll(row.keySet()); + } + + preImportDIBValidation(null, colNames); + return MapDataIterator.of(colNames, rows, debugName); + } + + + /** @deprecated switch to using DIB based method */ + @Deprecated + protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) + throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + boolean hasTableScript = hasTableScript(container); + + errors.setExtraContext(extraScriptContext); + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + row = normalizeColumnNames(row); + try + { + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), false); + if (hasTableScript) + { + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); + } + row = insertRow(user, container, row); + if (row == null) + continue; + + if (hasTableScript) + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); + result.add(row); + } + catch (SQLException sqlx) + { + if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) + { + ValidationException vex = new ValidationException(sqlx.getMessage()); + vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); + errors.addRowError(vex); + } + else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) + { + // if we already have some errors, just break + break; + } + else + { + throw sqlx; + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + } + + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); + + return result; + } + + protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) + { + if (!isBulkLoad()) + { + AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; + String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); + getQueryTable().getAuditHandler(auditBehavior) + .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); + } + } + + private Map normalizeColumnNames(Map row) + { + if(_columnImportMap == null) + { + _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); + } + + Map newRow = new CaseInsensitiveHashMap<>(); + CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); + columns.addAll(row.keySet()); + + String newName; + for(String key : row.keySet()) + { + if(_columnImportMap.containsKey(key)) + { + //it is possible for a normalized name to conflict with an existing property. if so, defer to the original + newName = _columnImportMap.get(key).getName(); + if(!columns.contains(newName)){ + newRow.put(newName, row.get(key)); + continue; + } + } + newRow.put(key, row.get(key)); + } + + return newRow; + } + + @Override + public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws DuplicateKeyException, QueryUpdateServiceException, SQLException + { + try + { + List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); + afterInsertUpdate(null==ret?0:ret.size(), errors); + if (errors.hasErrors()) + return null; + return ret; + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + } + return null; + } + + protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) + { + if (col != null && value != null && + !col.getJavaObjectClass().isInstance(value) && + !(value instanceof AttachmentFile) && + !(value instanceof MultipartFile) && + !(value instanceof String[]) && + !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) + { + try + { + if 
(col.getKindOfQuantity() != null) + providedValues.put(key, value); + if (PropertyType.FILE_LINK.equals(col.getPropertyType())) + value = ExpDataFileConverter.convert(value); + else + value = col.convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw e; + } + catch (ConversionException e) + { + // That's OK, the transformation script may be able to fix up the value before it gets inserted + } + } + + return value; + } + + /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ + @Deprecated + protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) + { + Map result = new CaseInsensitiveHashMap<>(row.size()); + Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); + for (Map.Entry entry : row.entrySet()) + { + ColumnInfo col = columnMap.get(entry.getKey()); + Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); + result.put(entry.getKey(), value); + } + + return result; + } + + protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + + protected boolean firstUpdateRow = true; + Function,Map> updateTransform = Function.identity(); + + /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ + final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + if (firstUpdateRow) + { + firstUpdateRow = false; + if (null != OntologyService.get()) + { + var t = OntologyService.get().getConceptUpdateHandler(_queryTable); + if (null != t) + updateTransform = t; + } + } + row = updateTransform.apply(row); + return updateRow(user, container, row, 
oldRow, configParameters); + } + + // used by updateRows to check if all rows have the same set of keys + // prepared statement can only be used to updateRows if all rows have the same set of keys + protected static boolean hasUniformKeys(List> rowsToUpdate) + { + if (rowsToUpdate == null || rowsToUpdate.isEmpty()) + return false; + + if (rowsToUpdate.size() == 1) + return true; + + Set keys = rowsToUpdate.get(0).keySet(); + int keySize = keys.size(); + + for (int i = 1 ; i < rowsToUpdate.size(); i ++) + { + Set otherKeys = rowsToUpdate.get(i).keySet(); + if (otherKeys.size() != keySize) + return false; + if (!otherKeys.containsAll(keys)) + return false; + } + + return true; + } + + @Override + public List> updateRows(User user, Container container, List> rows, List> oldKeys, + BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + if (oldKeys != null && rows.size() != oldKeys.size()) + throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); + + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> oldRows = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), true); + try + { + Map oldKey = oldKeys == null ? row : oldKeys.get(i); + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, oldKey); + if (oldRow == null) + throw new NotFoundException("The existing row was not found."); + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); + Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); + if (!streaming) + { + result.add(updatedRow); + oldRows.add(oldRow); + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (OptimisticConflictException e) + { + errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + afterInsertUpdate(null==result?0:result.size(), errors, true); + + if (errors.hasErrors()) + throw errors; + + addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); + + return result; + } + + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException + { + if (pkVals == null) + return; + + Set updatedRows = getPreviouslyUpdatedRows(); + + Object[] keysObj; + if (pkVals.getClass().isArray()) + keysObj = (Object[]) pkVals; + else if (pkVals instanceof Map map) + { + List orderedKeyVals = new ArrayList<>(); + SortedSet sortedKeys = new TreeSet<>(map.keySet()); + for (String key : sortedKeys) + orderedKeyVals.add(map.get(key)); + keysObj = orderedKeyVals.toArray(); + } + else + keysObj = new Object[]{pkVals}; + + if (keysObj.length == 1) + { + if (updatedRows.contains(keysObj[0])) + throw new ValidationException("Duplicate key provided: " + keysObj[0]); + updatedRows.add(keysObj[0]); + return; + } + + List keys = new ArrayList<>(); + for (Object key : keysObj) + keys.add(String.valueOf(key)); + if (updatedRows.contains(keys)) + throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); + updatedRows.add(keys); + } + + @Override + public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Move is not supported for this table type."); + } + + protected abstract Map deleteRow(User user, Container container, Map oldRow) + throws InvalidKeyException, ValidationException, 
QueryUpdateServiceException, SQLException; + + protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return deleteRow(user, container, oldRow); + } + + @Override + public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to delete data from this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + List> result = new ArrayList<>(keys.size()); + for (int i = 0; i < keys.size(); i++) + { + Map key = keys.get(i); + try + { + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, key); + // if row doesn't exist, bail early + if (oldRow == null) + continue; + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); + Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); + result.add(updatedRow); + } + catch 
(InvalidKeyException ex) + { + ValidationException vex = new ValidationException(ex.getMessage()); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); + + return result; + } + + protected int truncateRows(User user, Container container) + throws QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException(); + } + + @Override + public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to truncate this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); + + int result = truncateRows(user, container); + + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); + addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); + + return result; + 
} + + @Override + public void setBulkLoad(boolean bulkLoad) + { + _bulkLoad = bulkLoad; + } + + @Override + public boolean isBulkLoad() + { + return _bulkLoad; + } + + public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException + { + FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); + return saveFile(user, container, name, value, dirPath); + } + + /** + * Save uploaded file to dirName directory under file or pipeline root. + */ + public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException + { + if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) + throw new ValidationException("Invalid file value"); + + String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; + FileLike file = null; + try + { + FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); + + FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); + if (value instanceof MultipartFile multipartFile) + { + // Once we've found one, write it to disk and replace the row's value with just the File reference to it + if (multipartFile.isEmpty()) + { + throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); + } + file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); + checkFileUnderRoot(container, file); + multipartFile.transferTo(toFileForWrite(file)); + event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); + event.setProvidedFileName(multipartFile.getOriginalFilename()); + } + else + { + SpringAttachmentFile saf = (SpringAttachmentFile) value; + file = 
FileUtil.findUniqueFileName(saf.getFilename(), dir); + checkFileUnderRoot(container, file); + saf.saveTo(file); + event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); + event.setProvidedFileName(saf.getFilename()); + } + event.setFile(file.getName()); + event.setFieldName(name); + event.setDirectory(file.getParent().toURI().getPath()); + AuditLogService.get().addEvent(user, event); + } + catch (IOException | ExperimentException e) + { + throw new QueryUpdateServiceException(e); + } + + ensureExpData(user, container, file.toNioPathForRead().toFile()); + return file; + } + + public static ExpData ensureExpData(User user, Container container, File file) + { + ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); + // create exp.data record + if (existingData == null) + { + File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); + ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); + data.setName(file.getName()); + data.setDataFileURI(canonicalFile.toPath().toUri()); + if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) + { + // If the path is too long to store, bail out without creating an exp.data row + data.save(user); + } + + return data; + } + + return existingData; + } + + // For security reasons, make sure the user hasn't tried to reference a file that's not under + // the pipeline root or @assayfiles root. 
Otherwise, they could get access to any file on the server + static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException + { + Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); + if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) + return file; + + PipeRoot root = PipelineService.get().findPipelineRoot(container); + if (root == null) + throw new ExperimentException("Pipeline root not available in container " + container.getPath()); + + if (!root.isUnderRoot(toFileForRead(file))) + { + throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); + } + + return file; + } + + protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) + { + if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level + { + AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); + String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); + boolean skipAuditLevelCheck = false; + if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) + { + if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad + skipAuditLevelCheck = true; + } + getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); + } + } + + /** + * Is used by the AttachmentDataIterator to point to the location of the serialized + * attachment files. 
+ */ + public void setAttachmentDirectory(VirtualFile att) + { + _att = att; + } + + @Nullable + protected VirtualFile getAttachmentDirectory() + { + return _att; + } + + /** + * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory + * implementation in order to resolve the attachment parent on incoming attachment files. + */ + @Nullable + protected AttachmentParentFactory getAttachmentParentFactory() + { + return null; + } + + /** Translate between the column name that query is exposing to the column name that actually lives in the database */ + protected static void aliasColumns(Map columnMapping, Map row) + { + for (Map.Entry entry : columnMapping.entrySet()) + { + if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) + { + row.put(entry.getKey(), row.get(entry.getValue())); + } + } + } + + /** + * The database table has underscores for MV column names, but we expose a column without the underscore. + * Therefore, we need to translate between the two sets of column names. 
+ * @return database column name -> exposed TableInfo column name + */ + protected static Map createMVMapping(Domain domain) + { + Map result = new CaseInsensitiveHashMap<>(); + if (domain != null) + { + for (DomainProperty domainProperty : domain.getProperties()) + { + if (domainProperty.isMvEnabled()) + { + result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); + } + } + } + return result; + } + + @TestWhen(TestWhen.When.BVT) + public static class TestCase extends Assert + { + private boolean _useAlias = false; + + static TabLoader getTestData() throws IOException + { + TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); + testData.parseAsCSV(); + testData.getColumns()[0].clazz = Integer.class; + testData.getColumns()[1].clazz = Integer.class; + testData.getColumns()[2].clazz = String.class; + return testData; + } + + @BeforeClass + public static void createList() throws Exception + { + if (null == ListService.get()) + return; + deleteList(); + + TabLoader testData = getTestData(); + String hash = GUID.makeHash(); + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + assertNotNull(lists); + + ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); + R.setKeyName("pk"); + Domain d = requireNonNull(R.getDomain()); + for (int i=0 ; i> getRows() + { + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); + return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); + } + + @Before + public void resetList() throws 
Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + qus.truncateRows(user, c, null, null); + } + + @AfterClass + public static void deleteList() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + Map m = s.getLists(c); + if (m.containsKey("R")) + m.get("R").delete(user); + } + + void validateDefaultData(List> rows) + { + assertEquals(3, rows.size()); + + assertEquals(0, rows.get(0).get("pk")); + assertEquals(1, rows.get(1).get("pk")); + assertEquals(2, rows.get(2).get("pk")); + + assertEquals(0, rows.get(0).get("i")); + assertEquals(1, rows.get(1).get("i")); + assertEquals(2, rows.get(2).get("i")); + + assertEquals("zero", rows.get(0).get("s")); + assertEquals("one", rows.get(1).get("s")); + assertEquals("two", rows.get(2).get("s")); + } + + @Test + public void INSERT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertFalse(errors.hasErrors()); + validateDefaultData(rows); + validateDefaultData(getRows()); + + qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void UPSERT() throws Exception + { + if (null == 
ListService.get()) + return; + /* not sure how you use/test ImportOptions.UPSERT + * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? + */ + } + + @Test + public void IMPORT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var count = qus.importRows(user, c, getTestData(), errors, null, null); + assertFalse(errors.hasErrors()); + assert(count == 3); + validateDefaultData(getRows()); + + qus.importRows(user, c, getTestData(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void MERGE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? 
"pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + BatchValidationException errors = new BatchValidationException() + { + @Override + public void addRowError(ValidationException vex) + { + LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); + fail(vex.getMessage()); + } + }; + int count=0; + try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) + { + var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + if (!errors.hasErrors()) + { + tx.commit(); + count = ret; + } + } + assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is not updated + assertEquals(2, rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + + // merge should fail if duplicate keys are provided + errors = new BatchValidationException(); + mergeRows = new ArrayList<>(); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + } + + @Test + public void UPDATE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var updateRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + + // update using data iterator + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(1, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO-UP", rows.get(2).get("s")); + // test existing row value is not updated/erased + assertEquals(2, rows.get(2).get("i")); + + // update should fail if a new record is provided + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + + // Issue 52728: update should fail if duplicate key is provide + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + + // use DIB + context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); + + // use updateRows + if (!_useAlias) // _update using alias is not supported + { + BatchValidationException errors 
= new BatchValidationException(); + try + { + qus.updateRows(user, c, updateRows, null, errors, null, null); + } + catch (Exception e) + { + + } + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + + } + } + + @Test + public void REPLACE() throws Exception + { + if (null == ListService.get()) + return; + assert(getRows().isEmpty()); + INSERT(); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.REPLACE); + var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is updated + assertNull(rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + } + + @Test + public void IMPORT_IDENTITY() + { + if (null == ListService.get()) + return; + // TODO + } + + @Test + public void ALIAS_MERGE() throws Exception + { + _useAlias = true; + MERGE(); + } + + @Test + public void ALIAS_REPLACE() throws Exception + { + _useAlias = true; + REPLACE(); + } + + @Test + public void ALIAS_UPDATE() throws Exception + { + _useAlias = true; + UPDATE(); + } + } +} diff --git 
a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index 1299cea69d9..f62a69ca510 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,941 +1,936 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import 
org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext, providedValue); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value, null); - } - } - } - - protected void validateUpdateRow(Map row) throws ValidationException - { - for 
(ColumnInfo col : getQueryTable().getColumns()) - { - // Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value, null); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have 
not existed in the row before the update. - lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = pk.convert(pkValue); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - protected TableInfo getTableInfoForConversion() - { - return getDbTable(); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getTableInfoForConversion(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import 
org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = pk.convert(pkValue); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } +} diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index 6f0d615f647..afc9cfef66d 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -422,9 +422,7 @@ describe('Duplicate IDs', () => { }], 'exp.data', dataType, topFolderOptions, editorUserOptions); const data1RowId = caseInsensitive(dataRows[0], 'rowId'); - const data1Lsid = caseInsensitive(dataRows[0], 'lsid'); const data2RowId = caseInsensitive(dataRows[1], 'rowId'); - const data2Lsid = caseInsensitive(dataRows[1], 'lsid'); // update data2 twice using updateRows, using rowId await server.post('query', 'updateRows', { @@ -445,23 +443,43 @@ describe('Duplicate IDs', () => { expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2RowId); }); - // update data2 twice using updateRows, using lsid (data iterator) + // update date twice specifying the name across multiple partitions await server.post('query', 'updateRows', { schemaName: 'exp.data', queryName: dataType, rows: [{ description: 'update', - lsid: data1Lsid + name: dataName1 },{ description: 'update', - lsid: data2Lsid + rowId: data2RowId + },{ + description: 'update', + name: dataName1 + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + errorResp = JSON.parse(result.text); + expect(errorResp).toBe('a'); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); + }); + + // update date twice specifying the rowId across multiple partitions + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [{ + description: 'update', + rowId: data1RowId + },{ + description: 'update', + name: data2RowId },{ description: 
'update', - lsid: data2Lsid + rowId: data1RowId }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { errorResp = JSON.parse(result.text); - expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2Lsid); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + data1RowId); }); errorResp = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n" + dataName1 + "\tupdate\n" + dataName2 + "\tupdate\n" + dataName2 + "\tupdate", dataType, "UPDATE", topFolderOptions, editorUserOptions); @@ -821,6 +839,10 @@ describe('Multi Value Text Choice', () => { }); +const LSID_UPDATE_ERROR = 'LSID is no longer accepted as a key for data update. Specify a RowId or Name instead.'; +const LSID_MERGE_ERROR = 'LSID is no longer accepted as a key for data merge. Specify a RowId or Name instead.'; +const ROWID_MERGE_ERROR = 'RowId is not accepted when merging data. Specify only the data name instead.'; + describe('Data CRUD', () => { it ("Update using different key fields", async () => { @@ -838,20 +860,25 @@ describe('Data CRUD', () => { // insert 2 rows data, provide explicit names and a rowId = -1 const dataName1 = 'KeyData1'; const dataName2 = 'KeyData2'; + const dataName3 = 'KeyData3'; const inserted = await insertDataClassData([ { name: dataName1, description: 'original1', [fieldName]: 'val1', rowId: -1 }, { name: dataName2, description: 'original2', [fieldName]: 'val2', rowId: -1 }, + { name: dataName3, description: 'original3', [fieldName]: 'val3', rowId: -1 }, ], dataType, topFolderOptions); // verify both rows are inserted with correct name and rowId is not -1 for both rows, record the rowId and lsid for both rows expect(inserted[0].name).toBe(dataName1); expect(inserted[1].name).toBe(dataName2); + expect(inserted[2].name).toBe(dataName3); expect(inserted[0].rowId).not.toBe(-1); expect(inserted[1].rowId).not.toBe(-1); + expect(inserted[2].rowId).not.toBe(-1); const row1RowId = inserted[0].rowId; const row1Lsid = 
inserted[0].lsid; const row2RowId = inserted[1].rowId; const row2Lsid = inserted[1].lsid; + const row3RowId = inserted[2].rowId; const findRow = (rows: any[], rowId: number) => rows.find(r => caseInsensitive(r, 'RowId') === rowId); @@ -869,10 +896,32 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('updByRowId2'); expect(caseInsensitive(row2, fieldName)).toBe('rowIdVal2'); - // update description and fieldName value for both rows using lsid as key, verify update is successful and data are updated correctly + // Error when supplying LSID without RowId or Name + // query api + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [ + { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row2Lsid, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + ] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(LSID_UPDATE_ERROR); + }); + // update from import + let importUpdateText = 'LSID\tDescription\t' + fieldName + '\n' + row1Lsid + '\timportUpd1\timportLsidVal1\n' + row2Lsid + '\timportUpd2\timportLsidVal2'; + let errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_UPDATE_ERROR) > -1).toBeTruthy(); + + // merge from import + errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_MERGE_ERROR) > -1).toBeTruthy(); + + // update using lsid (correct and incorrect, should both be ignored), as well as rowId, as key, should succeed, verify update is successful and data are updated correctly await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, - { lsid: row2Lsid, 
description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + { lsid: row1Lsid, rowId: row1RowId, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row1Lsid /*wrong lsid, should be ignored anyways*/, rowId: row2RowId, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); @@ -882,27 +931,34 @@ describe('Data CRUD', () => { expect(caseInsensitive(row1, fieldName)).toBe('lsidVal1'); expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('lsidVal2'); + expect(caseInsensitive(row2, 'lsid')).toBe(row2Lsid); // lsid should not be updated - // update description and fieldName value, one of the row use lsid as key, the other use rowId, verify update is successful and data are updated correctly + // update with different set of columns + // should use partitioned data iterator await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updMixed1', [fieldName]: 'mixedVal1' }, - { rowId: row2RowId, description: 'updMixed2', [fieldName]: 'mixedVal2' }, + { rowId: row1RowId, description: 'updMixed1', [fieldName]: 'mixedVal1' }, + { rowId: row2RowId, name: 'mixed_rename2', [fieldName]: 'mixedVal2' }, + { rowId: row3RowId, description: 'mixedVal3 desc' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); - rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId, row3RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); row1 = findRow(rows, row1RowId); row2 = findRow(rows, row2RowId); + var row3 = findRow(rows, row3RowId); expect(caseInsensitive(row1, 'description')).toBe('updMixed1'); expect(caseInsensitive(row1, 
fieldName)).toBe('mixedVal1'); - expect(caseInsensitive(row2, 'description')).toBe('updMixed2'); + expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('mixedVal2'); + expect(caseInsensitive(row2, 'name')).toBe('mixed_rename2'); + expect(caseInsensitive(row3, 'description')).toBe('mixedVal3 desc'); + expect(caseInsensitive(row3, fieldName)).toBe('val3'); // fieldName value should not be updated for row3 - // update names of both rows using lsid as key, verify update is successful and names are updated correctly + // update names of both rows using lsid (ignored) an rowId as key, verify update is successful and names are updated correctly const newName1 = 'RenamedByLsid1'; const newName2 = 'RenamedByLsid2'; await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, name: newName1 }, - { lsid: row2Lsid, name: newName2 }, + { lsid: "BAD", rowId: row1RowId, name: newName1 }, + { lsid: row1Lsid /*wrong*/, rowId: row2RowId, name: newName2 }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, 'RowId,Name', topFolderOptions, adminOptions); @@ -911,7 +967,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row1, 'Name')).toBe(newName1); expect(caseInsensitive(row2, 'Name')).toBe(newName2); - // update names of both rows using rowId as key, verify update is successful and names are updated correctly + // update names of both rows using just rowId as key, verify update is successful and names are updated correctly const newName3 = 'RenamedByRowId1'; const newName4 = 'RenamedByRowId2'; await ExperimentCRUDUtils.updateRows(server, [ @@ -926,7 +982,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'Name')).toBe(newName4); // update description and fieldName value from Import with update, the import columns contains name field, verify update is successful and data are updated correctly - 
const importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; + importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; const updateResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); expect(updateResp.body.success).toBe(true); @@ -938,6 +994,12 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('importUpd2'); expect(caseInsensitive(row2, fieldName)).toBe('importVal2'); + // Error when supplying RowId during MERGE, verify import fails + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tName\tDescription\n" + row3RowId + "\t" + dataName3 + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + // update description and fieldName value from Import with merge. at the same time create a new data. 
the import columns contain name field, verify update and insert is successful const newDataName = 'MergedNewData'; const importMergeText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\tmergeUpd1\tmergeVal1\n' + newName4 + '\tmergeUpd2\tmergeVal2\n' + newDataName + '\tmergeNew\tmergeNewVal'; @@ -957,6 +1019,60 @@ describe('Data CRUD', () => { expect(caseInsensitive(newDataRow, 'Name')).toBe(newDataName); expect(caseInsensitive(newDataRow, 'description')).toBe('mergeNew'); expect(caseInsensitive(newDataRow, fieldName)).toBe('mergeNewVal'); + + // Update from file, using rowId as key, verify update should be successful and data are updated correctly + const importUpdateRowIdText = 'RowId\tDescription\t' + fieldName + '\n' + row1RowId + '\timportUpdByRowId1\timportValByRowId1\n' + row2RowId + '\timportUpdByRowId2\timportValByRowId2'; + const updateByRowIdResp = await ExperimentCRUDUtils.importData(server, importUpdateRowIdText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1'); + expect(caseInsensitive(row1, fieldName)).toBe('importValByRowId1'); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2'); + expect(caseInsensitive(row2, fieldName)).toBe('importValByRowId2'); + + // update from file, provide rowId and an updated name, verify name is successfully updated + const newNameByRowId1 = 'RenamedByRowId1Import'; + const newNameByRowId2 = 'RenamedByRowId2Import'; + const importUpdateRowIdNameText = 'RowId\tName\tDescription\n' + row1RowId + '\t' + newNameByRowId1 + '\timportUpdByRowId1-2\n' + row2RowId + '\t' + newNameByRowId2 + '\timportUpdByRowId2-2\n'; + const updateByRowIdNameResp = await 
ExperimentCRUDUtils.importData(server, importUpdateRowIdNameText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdNameResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'Name')).toBe(newNameByRowId1); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1-2'); + expect(caseInsensitive(row2, 'Name')).toBe(newNameByRowId2); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2-2'); + + // verify data rowId needs to match provided dataclass type + const emptyDataClass = dataType + "Empty"; + await server.post('property', 'createDomain', { + kind: 'DataClass', + domainDesign: { name: emptyDataClass, fields: [{ name: fieldName }] }, + options: { name: dataType } + }, { ...topFolderOptions, ...designerReaderOptions }).expect(successfulResponse); + + // using query api, update using rowId for data that doesn't exist on the new dataclass should fail. + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: emptyDataClass, + rows: [{ + description: 'update', + rowId: row3RowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain('Data not found for [' + row3RowId + ']'); + }); + + // using update from file, verify update using rowId for data that doesn't exist on this datacalss should fail. 
+ errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", emptyDataClass, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain('Data not found for [' + row3RowId + ']'); + }); }); diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index b14dacc468e..68045b61013 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -870,9 +870,6 @@ public static void derive(User user, Container container, DataIterator di, boole ExpDataIterators.DerivationDataIteratorBuilder ddib = new ExpDataIterators.DerivationDataIteratorBuilder(di, container, user, isSample, dataType, skipAliquot, true); DataIteratorContext context = new DataIteratorContext(); context.setInsertOption(QueryUpdateService.InsertOption.UPDATE); - Map configParameters = new HashMap<>(); - configParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - context.setConfigParameters(configParameters); DataIterator derive = ddib.getDataIterator(context); new Pump(derive, context).run(); if (context.getErrors().hasErrors()) @@ -915,7 +912,7 @@ public DataIterator getDataIterator(DataIteratorContext context) else if (_isSample) di = new SampleUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); else - di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); + di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents);// return LoggingDataIterator.wrap(di); } @@ -1186,7 +1183,7 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String lsid = (String) get(_lsidCol); + String lsid = (String) get(_lsidCol); // why lsid?, insert or 
merge String name = null; if (_nameCol != null) name = (String) get(_nameCol); @@ -1449,9 +1446,10 @@ else if (o instanceof Number) private static class DataUpdateDerivationDataIterator extends DerivationDataIteratorBase { - // Map from Data name to Set of (parentColName, parentName) - final Map>> _parentNames; - final boolean _useLsid; + // Map from Data key (RowId or name) to Set of (parentColName, parentName) + final Map>> _parentNames; + final Integer _rowIdCol; + final boolean _useRowId; protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean checkRequiredParent) { @@ -1459,7 +1457,8 @@ protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext Map map = DataIteratorUtil.createColumnNameMap(di); _parentNames = new LinkedHashMap<>(); - _useLsid = map.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + _rowIdCol = map.getOrDefault(ExpDataTable.Column.RowId.name(), -1); + _useRowId = map.containsKey(ExpDataTable.Column.RowId.name()); } @Override @@ -1474,9 +1473,15 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String key = null; - if (_useLsid && _lsidCol != null) - key = (String) get(_lsidCol); + Object key = null; + if (_useRowId && _rowIdCol != null) + { + key = get(_rowIdCol); + if (key instanceof String k) + key = Long.parseLong(k); + else + key = asLong(key); + } else if (_nameCol != null) key = (String) get(_nameCol); @@ -1504,9 +1509,11 @@ else if (_nameCol != null) Map dataCache = new LongHashMap<>(); List runRecords = new ArrayList<>(); - for (String key : _parentNames.keySet()) + for (Object key : _parentNames.keySet()) { - ExpData expData = _useLsid ? ExperimentService.get().getExpData(key) : getDataClass().getData(_container, key); + ExpData expData = _useRowId + ? 
ExperimentService.get().getExpData((Long) key) + : getDataClass().getData(_container, (String) key); if (expData == null) continue; @@ -2428,15 +2435,8 @@ public DataIterator getDataIterator(DataIteratorContext context) } else { - if (isMergeOrUpdate) - { - boolean isUpdateUsingLsid = isUpdateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid && !canUpdateNames) - dontUpdate.add(ExpDataTable.Column.Name.name()); - } + if (isUpdateOnly && !canUpdateNames) + dontUpdate.add(ExpDataTable.Column.Name.name()); } // Since we support detailed audit logging, add the ExistingRecordDataIterator here just before TableInsertDataIterator. @@ -2700,29 +2700,20 @@ private MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorConte FieldKey dataKey; boolean isNumeric; - if (_isSamples) - { - var foundId = RowId.namesAndLabels().stream() - .filter(map::containsKey) - .findFirst(); + var foundId = RowId.namesAndLabels().stream() + .filter(map::containsKey) + .findFirst(); - if (foundId.isPresent()) - { - index = map.get(foundId.get()); - dataKey = RowId.fieldKey(); - isNumeric = true; - } - else - { - index = map.getOrDefault(Name.name(), -1); - dataKey = Name.fieldKey(); - isNumeric = false; - } + if (foundId.isPresent()) + { + index = map.get(foundId.get()); + dataKey = RowId.fieldKey(); + isNumeric = true; } else { - index = map.getOrDefault(ExpDataTable.Column.Name.name(), -1); - dataKey = ExpDataTable.Column.Name.fieldKey(); + index = map.getOrDefault(Name.name(), -1); + dataKey = Name.fieldKey(); isNumeric = false; } diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 3808f929df0..90d22e0091a 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -1,1173 +1,1173 @@ -/* 
- * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.admin.FolderSerializationRegistry; -import org.labkey.api.assay.AssayProvider; -import org.labkey.api.assay.AssayService; -import org.labkey.api.attachments.AttachmentService; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.SampleTimelineAuditEvent; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbSchemaType; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.NameGenerator; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SimpleFilter.FilterClause; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpgradeCode; -import org.labkey.api.defaults.DefaultValueService; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.ExperimentRunType; -import org.labkey.api.exp.Lsid; -import 
org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.DefaultExperimentDataHandler; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpLineageService; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolAttachmentType; -import org.labkey.api.exp.api.ExpRunAttachmentType; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentJSONConverter; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.FilterProtocolInputCriteria; -import org.labkey.api.exp.api.SampleTypeDomainKind; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DomainAuditProvider; -import org.labkey.api.exp.property.DomainPropertyAuditProvider; -import org.labkey.api.exp.property.ExperimentProperty; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.SystemProperty; -import org.labkey.api.exp.query.ExpDataClassTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.exp.xar.LSIDRelativizer; -import org.labkey.api.exp.xar.LsidUtils; -import org.labkey.api.files.FileContentService; -import org.labkey.api.files.TableUpdaterFileListener; -import org.labkey.api.migration.DatabaseMigrationService; -import org.labkey.api.migration.ExperimentDeleteService; -import org.labkey.api.migration.MigrationTableHandler; -import org.labkey.api.module.ModuleContext; -import org.labkey.api.module.ModuleLoader; -import org.labkey.api.module.SpringModule; -import org.labkey.api.module.Summary; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import 
org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.FilteredTable; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.UserSchema; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.roles.RoleManager; -import org.labkey.api.settings.AppProps; -import org.labkey.api.settings.OptionalFeatureService; -import org.labkey.api.usageMetrics.UsageMetricsService; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JspTestCase; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.SystemMaintenance; -import org.labkey.api.view.AlwaysAvailableWebPartFactory; -import org.labkey.api.view.BaseWebPartFactory; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.JspView; -import org.labkey.api.view.Portal; -import org.labkey.api.view.ViewContext; -import org.labkey.api.view.WebPartFactory; -import org.labkey.api.view.WebPartView; -import org.labkey.api.view.template.WarningService; -import org.labkey.api.vocabulary.security.DesignVocabularyPermission; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.api.webdav.WebdavService; -import org.labkey.api.writer.ContainerUser; -import org.labkey.experiment.api.DataClassDomainKind; -import org.labkey.experiment.api.ExpDataClassImpl; -import org.labkey.experiment.api.ExpDataClassTableImpl; -import org.labkey.experiment.api.ExpDataClassType; -import org.labkey.experiment.api.ExpDataImpl; -import org.labkey.experiment.api.ExpDataTableImpl; -import org.labkey.experiment.api.ExpMaterialImpl; -import org.labkey.experiment.api.ExpProtocolImpl; -import org.labkey.experiment.api.ExpSampleTypeImpl; -import org.labkey.experiment.api.ExpSampleTypeTableImpl; -import org.labkey.experiment.api.ExperimentServiceImpl; -import org.labkey.experiment.api.ExperimentStressTest; -import 
org.labkey.experiment.api.GraphAlgorithms; -import org.labkey.experiment.api.LineageTest; -import org.labkey.experiment.api.LogDataType; -import org.labkey.experiment.api.Protocol; -import org.labkey.experiment.api.SampleTypeServiceImpl; -import org.labkey.experiment.api.SampleTypeUpdateServiceDI; -import org.labkey.experiment.api.UniqueValueCounterTestCase; -import org.labkey.experiment.api.VocabularyDomainKind; -import org.labkey.experiment.api.data.ChildOfCompareType; -import org.labkey.experiment.api.data.ChildOfMethod; -import org.labkey.experiment.api.data.LineageCompareType; -import org.labkey.experiment.api.data.ParentOfCompareType; -import org.labkey.experiment.api.data.ParentOfMethod; -import org.labkey.experiment.api.property.DomainImpl; -import org.labkey.experiment.api.property.DomainPropertyImpl; -import org.labkey.experiment.api.property.LengthValidator; -import org.labkey.experiment.api.property.LookupValidator; -import org.labkey.experiment.api.property.PropertyServiceImpl; -import org.labkey.experiment.api.property.RangeValidator; -import org.labkey.experiment.api.property.RegExValidator; -import org.labkey.experiment.api.property.StorageNameGenerator; -import org.labkey.experiment.api.property.StorageProvisionerImpl; -import org.labkey.experiment.api.property.TextChoiceValidator; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.controllers.property.PropertyController; -import org.labkey.experiment.defaults.DefaultValueServiceImpl; -import org.labkey.experiment.lineage.ExpLineageServiceImpl; -import org.labkey.experiment.lineage.LineagePerfTest; -import org.labkey.experiment.pipeline.ExperimentPipelineProvider; -import org.labkey.experiment.pipeline.XarTestPipelineJob; -import org.labkey.experiment.samples.DataClassFolderImporter; -import org.labkey.experiment.samples.DataClassFolderWriter; -import org.labkey.experiment.samples.SampleStatusFolderImporter; -import 
org.labkey.experiment.samples.SampleTimelineAuditProvider; -import org.labkey.experiment.samples.SampleTypeFolderImporter; -import org.labkey.experiment.samples.SampleTypeFolderWriter; -import org.labkey.experiment.security.DataClassDesignerRole; -import org.labkey.experiment.security.SampleTypeDesignerRole; -import org.labkey.experiment.types.TypesController; -import org.labkey.experiment.xar.FolderXarImporterFactory; -import org.labkey.experiment.xar.FolderXarWriterFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; -import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; -import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; - -public class ExperimentModule extends SpringModule -{ - private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; - private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; - - public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; - public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; - public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; - public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; - - @Override - public String getName() - { - return MODULE_NAME; - } - - @Override - public Double getSchemaVersion() - { - return 26.004; - } - - @Nullable - @Override - public UpgradeCode getUpgradeCode() - { - return new ExperimentUpgradeCode(); - } - - @Override - protected void init() - { - addController("experiment", 
ExperimentController.class); - addController("experiment-types", TypesController.class); - addController("property", PropertyController.class); - ExperimentService.setInstance(new ExperimentServiceImpl()); - SampleTypeService.setInstance(new SampleTypeServiceImpl()); - DefaultValueService.setInstance(new DefaultValueServiceImpl()); - StorageProvisioner.setInstance(StorageProvisionerImpl.get()); - ExpLineageService.setInstance(new ExpLineageServiceImpl()); - - PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); - PropertyService.setInstance(propertyServiceImpl); - UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); - - UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); - - ExperimentProperty.register(); - SamplesSchema.register(this); - ExpSchema.register(this); - - PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); - PropertyService.get().registerDomainKind(new DataClassDomainKind()); - PropertyService.get().registerDomainKind(new VocabularyDomainKind()); - - QueryService.get().addCompareType(new ChildOfCompareType()); - QueryService.get().addCompareType(new ParentOfCompareType()); - QueryService.get().addCompareType(new LineageCompareType()); - QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); - QueryService.get().addQueryListener(new PropertyQueryChangeListener()); - - PropertyService.get().registerValidatorKind(new RegExValidator()); - PropertyService.get().registerValidatorKind(new RangeValidator()); - PropertyService.get().registerValidatorKind(new LookupValidator()); - PropertyService.get().registerValidatorKind(new LengthValidator()); - PropertyService.get().registerValidatorKind(new TextChoiceValidator()); - - 
ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); - ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); - ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); - ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); - ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", - "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); - if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) - { - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", - "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); - } - else - { - OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", - "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); - - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", - "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, "Allow multi-value Text Choice properties", - "Support selecting more than one value for text choice fields", false); - } - 
OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", - "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", - "Support for querying lineage of experiment objects", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(SampleTypeUpdateServiceDI.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE, "Allow RowId to be accepted when merging samples", - "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); - - RoleManager.registerPermission(new DesignVocabularyPermission(), true); - RoleManager.registerRole(new SampleTypeDesignerRole()); - RoleManager.registerRole(new DataClassDesignerRole()); - - AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); - AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); - - WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); - ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); - - addModuleProperty(new LineageMaximumDepthModuleProperty(this)); - WarningService.get().register(new ExperimentWarningProvider()); - } - - @Override - public boolean hasScripts() - { - return true; - } - - @Override - @NotNull - protected Collection createWebPartFactories() - { - List result = new ArrayList<>(); - - BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunGroupWebPart(portalCtx, 
WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); - } - }; - runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); - result.add(runGroupsFactory); - - BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunTypeWebPart(); - } - }; - result.add(runTypesFactory); - - result.add(new ExperimentRunWebPartFactory()); - BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); - result.add(sampleTypeFactory); - result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); - view.setTitle("Samples"); - return view; - } - }); - - result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); - } - }); - - BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, 
WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - narrowProtocolFactory.addLegacyNames("Narrow Protocols"); - result.add(narrowProtocolFactory); - - return result; - } - - private void addDataResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() - { - @Override - public WebdavResource resolve(@NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return data.createIndexDocument(null); - } - - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); - if (idDataMap == null) - return null; - - Map> searchJsonMap = new HashMap<>(); - for (String resourceIdentifier : idDataMap.keySet()) - searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); - return searchJsonMap; - } - }); - } - - private void addDataClassResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - 
return null; - - ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); - if (dataClass == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); - - return properties; - } - }); - } - - private void addSampleTypeResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); - if (sampleType == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "sampleSet"); - - return properties; - } - }); - } - - private void addSampleResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); - if (material == null) - return null; - - return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Set rowIds = new HashSet<>(); - Map rowIdIdentifierMap = new LongHashMap<>(); - for 
(String resourceIdentifier : resourceIdentifiers) - { - long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId != 0) - { - rowIds.add(rowId); - rowIdIdentifierMap.put(rowId, resourceIdentifier); - } - } - - Map> searchJsonMap = new HashMap<>(); - for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) - { - searchJsonMap.put( - rowIdIdentifierMap.get(material.getRowId()), - ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() - ); - } - - return searchJsonMap; - } - }); - } - - @Override - protected void startupAfterSpringConfig(ModuleContext moduleContext) - { - SearchService ss = SearchService.get(); -// ss.addSearchCategory(OntologyManager.conceptCategory); - ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); - ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); - ss.addSearchCategory(ExpMaterialImpl.searchCategory); - ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); - ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataImpl.expDataCategory); - ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); - ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); - addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); - addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); - addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); - addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); - ss.addDocumentProvider(ExperimentServiceImpl.get()); - - 
PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); - ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); - ExperimentService.get().registerDataType(new LogDataType()); - - AuditLogService.get().registerAuditType(new DomainAuditProvider()); - AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); - AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); - - FileContentService fileContentService = FileContentService.get(); - if (null != fileContentService) - { - fileContentService.addFileListener(new ExpDataFileListener()); - fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); - fileContentService.addFileListener(new FileLinkFileListener()); - } - ContainerManager.addContainerListener(new ContainerManager.ContainerListener() - { - @Override - public void containerDeleted(Container c, User user) - { - try - { - ExperimentService.get().deleteAllExpObjInContainer(c, user); - } - catch (ExperimentException ee) - { - throw new RuntimeException(ee); - } - } - }, - // This is in the Last group because when a container is deleted, - // the Experiment listener needs to be called after the Study listener, - // because Study needs the metadata held by Experiment to delete properly. 
- // but it should be before the CoreContainerListener - ContainerManager.ContainerListener.Order.Last); - - if (ModuleLoader.getInstance().shouldInsertData()) - SystemProperty.registerProperties(); - - FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); - if (null != folderRegistry) - { - folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); - folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); - folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); - folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); - } - - AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); - - WebdavService.get().addProvider(new ScriptsResourceProvider()); - - SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); - - UsageMetricsService svc = UsageMetricsService.get(); - if (null != svc) - { - svc.registerUsageMetrics(getName(), () -> { - Map results = new HashMap<>(); - - DbSchema schema = ExperimentService.get().getSchema(); - if (AssayService.get() != null) - { - Map assayMetrics = new HashMap<>(); - SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); - SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); - for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) - { - Map protocolMetrics = new HashMap<>(); - - // Run count across all assay designs of this type - SQLFragment runSQL = new SQLFragment(baseRunSQL); - runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); - protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); - - // Number of assay designs of this type - SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); - protocolSQL.add(assayProvider.getProtocolPattern()); - protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); - List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); - protocolMetrics.put("protocolCount", protocols.size()); - - List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); - - protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); - - // Primary implementation class - protocolMetrics.put("implementingClass", assayProvider.getClass()); - - assayMetrics.put(assayProvider.getName(), protocolMetrics); - } - assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - - assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); - SQLFragment runsWithPlateSQL = new SQLFragment(""" - SELECT COUNT(*) FROM exp.experimentrun r - INNER JOIN exp.object o ON o.objectUri = r.lsid - INNER JOIN exp.objectproperty op ON op.objectId = o.objectId - WHERE op.propertyid IN ( - SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? - )"""); - assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); - assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); - - assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file - assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ - SELECT COUNT(*) FROM ( - SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data - WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( - SELECT rowid FROM exp.protocolapplication - WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') - ) - GROUP BY sourceapplicationid - ) x WHERE count > 1""").getObject(Long.class)); - - Map sampleLookupCountMetrics = new HashMap<>(); - SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); - - SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); - sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); - sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( - """ - SELECT COUNT(*) FROM ( - SELECT PD.domainid, COUNT(*) AS PropCount - FROM exp.propertydescriptor D - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) - AND propertyuri LIKE ? - GROUP BY PD.domainid - ) X WHERE X.PropCount > 1""" - ); - resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); - - assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); - - - // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) - results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assay", assayMetrics); - } - - results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); - results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); - - if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries - { - Collection> numSampleCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); - results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( - map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") - ).count()); - } - UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); - FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); - - SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + - " FROM (\n" + - " -- updates that are marked as lineage updates\n" + - " (SELECT DISTINCT transactionId\n" + - " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + - " AND comment = 'Sample was updated.'\n" + - " ) a1\n" + - " JOIN\n" + - " -- but have associated entries that are not lineage updates\n" + - " (SELECT DISTINCT transactionid\n" + - " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + - " ON a1.transactionid = a2.transactionid\n" + - " )"); - - results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); - - results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); - results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); - results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); - results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); - results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); - results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); - results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); - results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); - results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); - - results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - String duplicateCaseInsensitiveSampleNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.material - WHERE materialsourceid IS NOT NULL - GROUP BY LOWER(name), materialsourceid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - String duplicateCaseInsensitiveDataNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.data - WHERE classid IS NOT NULL - GROUP BY LOWER(name), classid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); - results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); - - results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); - results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); - results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + - "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); - if (schema.getSqlDialect().isPostgreSQL()) - { - Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); - results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> - (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); - } - - results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); - results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); - results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); - - results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); - results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - - results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); - - results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); - results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); - - results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); - results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); - - results.putAll(ExperimentService.get().getDomainMetrics()); - - return results; - }); - } - } - - @Override - public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) - { - ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); - service.registerSchemaHandler(handler); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); - DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); - service.registerSchemaHandler(dcHandler); - ExperimentDeleteService.setInstance(dcHandler); - } - - @Override - @NotNull - public Collection getSummary(Container c) - { - Collection list = new LinkedList<>(); - int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); - if (runGroupCount > 0) - list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); - - User user = HttpView.currentContext().getUser(); - - Set runTypes = ExperimentService.get().getExperimentRunTypes(c); - for (ExperimentRunType runType : runTypes) - { - if (runType == ExperimentRunType.ALL_RUNS_TYPE) - continue; - - long runCount = runType.getRunCount(user, c); - if (runCount > 0) - list.add(runCount + " runs of type " + runType.getDescription()); - } - - int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); - if (dataClassCount > 0) - list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); - - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if (sampleTypeCount > 0) - list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); - - return list; - } - - @Override - public @NotNull ArrayList getDetailedSummary(Container c, User user) - { - ArrayList summaries = new ArrayList<>(); - - // Assay types - long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); - if (assayTypeCount > 0) - summaries.add(new Summary(assayTypeCount, "Assay Type")); - - // Run count - int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); - if (runGroupCount > 0) - summaries.add(new Summary(runGroupCount, "Assay run")); - - // Number of Data Classes - List dataClasses = ExperimentService.get().getDataClasses(c, false); - int dataClassCount = dataClasses.size(); - if (dataClassCount > 0) - summaries.add(new Summary(dataClassCount, "Data Class")); - - ExpSchema expSchema = new ExpSchema(user, c); - - // Individual Data Class row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 47919: The "DataCount" column is filtered to only count data in the target container - if (table instanceof ExpDataClassTableImpl tableImpl) - tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpDataClassTable.Column.Name.name()); - columns.add(ExpDataClassTable.Column.DataCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - summaries.add(new Summary(count, entry.getKey())); - } - } - - // Sample Types - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if 
(sampleTypeCount > 0) - summaries.add(new Summary(sampleTypeCount, "Sample Type")); - - // Individual Sample Type row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 51557: The "SampleCount" column is filtered to only count data in the target container - if (table instanceof ExpSampleTypeTableImpl tableImpl) - tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpSampleTypeTable.Column.Name.name()); - columns.add(ExpSampleTypeTable.Column.SampleCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - { - String name = entry.getKey(); - Summary s = name.equals("MixtureBatches") - ? 
new Summary(count, "Batch") - : new Summary(count, name); - summaries.add(s); - } - } - } - - return summaries; - } - - @Override - public @NotNull Set> getIntegrationTests() - { - return Set.of( - DomainImpl.TestCase.class, - DomainPropertyImpl.TestCase.class, - ExpDataTableImpl.TestCase.class, - ExperimentServiceImpl.AuditDomainUriTest.class, - ExperimentServiceImpl.LineageQueryTestCase.class, - ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, - ExperimentServiceImpl.TestCase.class, - ExperimentStressTest.class, - LineagePerfTest.class, - LineageTest.class, - OntologyManager.TestCase.class, - PropertyServiceImpl.TestCase.class, - SampleTypeServiceImpl.TestCase.class, - StorageNameGenerator.TestCase.class, - StorageProvisionerImpl.TestCase.class, - UniqueValueCounterTestCase.class, - XarTestPipelineJob.TestCase.class - ); - } - - @Override - public @NotNull Collection>> getIntegrationTestFactories() - { - List>> list = new ArrayList<>(super.getIntegrationTestFactories()); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); - return list; - } - - @Override - public @NotNull Set> getUnitTests() - { - return Set.of( - GraphAlgorithms.TestCase.class, - LSIDRelativizer.TestCase.class, - Lsid.TestCase.class, - LsidUtils.TestCase.class, - PropertyController.TestCase.class, - Quantity.TestCase.class, - Unit.TestCase.class - ); - } - - @Override - @NotNull - public Collection getSchemaNames() - { - return List.of( - ExpSchema.SCHEMA_NAME, - DataClassDomainKind.PROVISIONED_SCHEMA_NAME, - SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME - ); - } - - @NotNull - @Override - public Collection getProvisionedSchemaNames() - { - return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); - } - - @Override - public JSONObject getPageContextJson(ContainerUser context) - { - JSONObject json = 
super.getPageContextJson(context); - json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); - return json; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.admin.FolderSerializationRegistry; +import org.labkey.api.assay.AssayProvider; +import org.labkey.api.assay.AssayService; +import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.SampleTimelineAuditEvent; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.NameGenerator; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpgradeCode; +import 
org.labkey.api.defaults.DefaultValueService; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.ExperimentRunType; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.DefaultExperimentDataHandler; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpLineageService; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolAttachmentType; +import org.labkey.api.exp.api.ExpRunAttachmentType; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentJSONConverter; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.FilterProtocolInputCriteria; +import org.labkey.api.exp.api.SampleTypeDomainKind; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DomainAuditProvider; +import org.labkey.api.exp.property.DomainPropertyAuditProvider; +import org.labkey.api.exp.property.ExperimentProperty; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.SystemProperty; +import org.labkey.api.exp.query.ExpDataClassTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.exp.xar.LSIDRelativizer; +import org.labkey.api.exp.xar.LsidUtils; +import org.labkey.api.files.FileContentService; +import org.labkey.api.files.TableUpdaterFileListener; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.ExperimentDeleteService; +import org.labkey.api.migration.MigrationTableHandler; +import org.labkey.api.module.ModuleContext; +import org.labkey.api.module.ModuleLoader; +import org.labkey.api.module.SpringModule; 
+import org.labkey.api.module.Summary; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.FilteredTable; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.UserSchema; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.roles.RoleManager; +import org.labkey.api.settings.AppProps; +import org.labkey.api.settings.OptionalFeatureService; +import org.labkey.api.usageMetrics.UsageMetricsService; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JspTestCase; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.SystemMaintenance; +import org.labkey.api.view.AlwaysAvailableWebPartFactory; +import org.labkey.api.view.BaseWebPartFactory; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.JspView; +import org.labkey.api.view.Portal; +import org.labkey.api.view.ViewContext; +import org.labkey.api.view.WebPartFactory; +import org.labkey.api.view.WebPartView; +import org.labkey.api.view.template.WarningService; +import org.labkey.api.vocabulary.security.DesignVocabularyPermission; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.api.webdav.WebdavService; +import org.labkey.api.writer.ContainerUser; +import org.labkey.experiment.api.DataClassDomainKind; +import org.labkey.experiment.api.ExpDataClassImpl; +import org.labkey.experiment.api.ExpDataClassTableImpl; +import org.labkey.experiment.api.ExpDataClassType; +import org.labkey.experiment.api.ExpDataImpl; +import org.labkey.experiment.api.ExpDataTableImpl; +import org.labkey.experiment.api.ExpMaterialImpl; +import org.labkey.experiment.api.ExpProtocolImpl; +import org.labkey.experiment.api.ExpSampleTypeImpl; +import 
org.labkey.experiment.api.ExpSampleTypeTableImpl; +import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.experiment.api.ExperimentStressTest; +import org.labkey.experiment.api.GraphAlgorithms; +import org.labkey.experiment.api.LineageTest; +import org.labkey.experiment.api.LogDataType; +import org.labkey.experiment.api.Protocol; +import org.labkey.experiment.api.SampleTypeServiceImpl; +import org.labkey.experiment.api.SampleTypeUpdateServiceDI; +import org.labkey.experiment.api.UniqueValueCounterTestCase; +import org.labkey.experiment.api.VocabularyDomainKind; +import org.labkey.experiment.api.data.ChildOfCompareType; +import org.labkey.experiment.api.data.ChildOfMethod; +import org.labkey.experiment.api.data.LineageCompareType; +import org.labkey.experiment.api.data.ParentOfCompareType; +import org.labkey.experiment.api.data.ParentOfMethod; +import org.labkey.experiment.api.property.DomainImpl; +import org.labkey.experiment.api.property.DomainPropertyImpl; +import org.labkey.experiment.api.property.LengthValidator; +import org.labkey.experiment.api.property.LookupValidator; +import org.labkey.experiment.api.property.PropertyServiceImpl; +import org.labkey.experiment.api.property.RangeValidator; +import org.labkey.experiment.api.property.RegExValidator; +import org.labkey.experiment.api.property.StorageNameGenerator; +import org.labkey.experiment.api.property.StorageProvisionerImpl; +import org.labkey.experiment.api.property.TextChoiceValidator; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.controllers.property.PropertyController; +import org.labkey.experiment.defaults.DefaultValueServiceImpl; +import org.labkey.experiment.lineage.ExpLineageServiceImpl; +import org.labkey.experiment.lineage.LineagePerfTest; +import org.labkey.experiment.pipeline.ExperimentPipelineProvider; +import org.labkey.experiment.pipeline.XarTestPipelineJob; +import org.labkey.experiment.samples.DataClassFolderImporter; 
+import org.labkey.experiment.samples.DataClassFolderWriter; +import org.labkey.experiment.samples.SampleStatusFolderImporter; +import org.labkey.experiment.samples.SampleTimelineAuditProvider; +import org.labkey.experiment.samples.SampleTypeFolderImporter; +import org.labkey.experiment.samples.SampleTypeFolderWriter; +import org.labkey.experiment.security.DataClassDesignerRole; +import org.labkey.experiment.security.SampleTypeDesignerRole; +import org.labkey.experiment.types.TypesController; +import org.labkey.experiment.xar.FolderXarImporterFactory; +import org.labkey.experiment.xar.FolderXarWriterFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; +import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; +import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; + +public class ExperimentModule extends SpringModule +{ + private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; + private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; + + public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; + public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; + public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; + public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; + + @Override + public String getName() + { + return MODULE_NAME; + } + + @Override + public Double getSchemaVersion() + { + return 26.004; + } + + @Nullable + @Override + public UpgradeCode 
getUpgradeCode() + { + return new ExperimentUpgradeCode(); + } + + @Override + protected void init() + { + addController("experiment", ExperimentController.class); + addController("experiment-types", TypesController.class); + addController("property", PropertyController.class); + ExperimentService.setInstance(new ExperimentServiceImpl()); + SampleTypeService.setInstance(new SampleTypeServiceImpl()); + DefaultValueService.setInstance(new DefaultValueServiceImpl()); + StorageProvisioner.setInstance(StorageProvisionerImpl.get()); + ExpLineageService.setInstance(new ExpLineageServiceImpl()); + + PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); + PropertyService.setInstance(propertyServiceImpl); + UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); + + UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); + + ExperimentProperty.register(); + SamplesSchema.register(this); + ExpSchema.register(this); + + PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); + PropertyService.get().registerDomainKind(new DataClassDomainKind()); + PropertyService.get().registerDomainKind(new VocabularyDomainKind()); + + QueryService.get().addCompareType(new ChildOfCompareType()); + QueryService.get().addCompareType(new ParentOfCompareType()); + QueryService.get().addCompareType(new LineageCompareType()); + QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); + QueryService.get().addQueryListener(new PropertyQueryChangeListener()); + + PropertyService.get().registerValidatorKind(new RegExValidator()); + PropertyService.get().registerValidatorKind(new RangeValidator()); + PropertyService.get().registerValidatorKind(new LookupValidator()); + 
PropertyService.get().registerValidatorKind(new LengthValidator()); + PropertyService.get().registerValidatorKind(new TextChoiceValidator()); + + ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); + ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); + ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); + ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); + ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", + "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); + if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) + { + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", + "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); + } + else + { + OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", + "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); + + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", + "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, 
"Allow multi-value Text Choice properties", + "Support selecting more than one value for text choice fields", false); + } + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", + "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", + "Support for querying lineage of experiment objects", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", + "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); + + RoleManager.registerPermission(new DesignVocabularyPermission(), true); + RoleManager.registerRole(new SampleTypeDesignerRole()); + RoleManager.registerRole(new DataClassDesignerRole()); + + AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); + AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); + + WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); + ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); + + addModuleProperty(new LineageMaximumDepthModuleProperty(this)); + WarningService.get().register(new ExperimentWarningProvider()); + } + + @Override + public boolean hasScripts() + { + return true; + } + + @Override + @NotNull + protected Collection createWebPartFactories() + { + List result = new ArrayList<>(); + + BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull 
ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); + } + }; + runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); + result.add(runGroupsFactory); + + BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunTypeWebPart(); + } + }; + result.add(runTypesFactory); + + result.add(new ExperimentRunWebPartFactory()); + BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); + result.add(sampleTypeFactory); + result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); + view.setTitle("Samples"); + return view; + } + }); + + result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); + } + }); 
+ + BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + narrowProtocolFactory.addLegacyNames("Narrow Protocols"); + result.add(narrowProtocolFactory); + + return result; + } + + private void addDataResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() + { + @Override + public WebdavResource resolve(@NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return data.createIndexDocument(null); + } + + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) + { + Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); + if (idDataMap == null) + return null; + + Map> searchJsonMap = new HashMap<>(); + for (String resourceIdentifier : idDataMap.keySet()) + searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); + return searchJsonMap; + } + }); + } + + private void addDataClassResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = 
NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); + if (dataClass == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); + + return properties; + } + }); + } + + private void addSampleTypeResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); + if (sampleType == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "sampleSet"); + + return properties; + } + }); + } + + private void addSampleResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); + if (material == null) + return null; + + return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) 
+ { + Set rowIds = new HashSet<>(); + Map rowIdIdentifierMap = new LongHashMap<>(); + for (String resourceIdentifier : resourceIdentifiers) + { + long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId != 0) + { + rowIds.add(rowId); + rowIdIdentifierMap.put(rowId, resourceIdentifier); + } + } + + Map> searchJsonMap = new HashMap<>(); + for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) + { + searchJsonMap.put( + rowIdIdentifierMap.get(material.getRowId()), + ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() + ); + } + + return searchJsonMap; + } + }); + } + + @Override + protected void startupAfterSpringConfig(ModuleContext moduleContext) + { + SearchService ss = SearchService.get(); +// ss.addSearchCategory(OntologyManager.conceptCategory); + ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); + ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); + ss.addSearchCategory(ExpMaterialImpl.searchCategory); + ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); + ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataImpl.expDataCategory); + ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); + ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); + addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); + addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); + addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); + addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); + addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); + 
addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); + ss.addDocumentProvider(ExperimentServiceImpl.get()); + + PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); + ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); + ExperimentService.get().registerDataType(new LogDataType()); + + AuditLogService.get().registerAuditType(new DomainAuditProvider()); + AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); + AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); + + FileContentService fileContentService = FileContentService.get(); + if (null != fileContentService) + { + fileContentService.addFileListener(new ExpDataFileListener()); + fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); + fileContentService.addFileListener(new FileLinkFileListener()); + } + ContainerManager.addContainerListener(new ContainerManager.ContainerListener() + { + @Override + public void containerDeleted(Container c, User user) + { + try + { + ExperimentService.get().deleteAllExpObjInContainer(c, user); + } + catch (ExperimentException ee) + { + throw new RuntimeException(ee); + } + } + }, + // This is in the Last group because when a container is deleted, + // the Experiment listener needs to be called after the Study listener, + // because Study needs the metadata held by Experiment to delete properly. 
+ // but it should be before the CoreContainerListener + ContainerManager.ContainerListener.Order.Last); + + if (ModuleLoader.getInstance().shouldInsertData()) + SystemProperty.registerProperties(); + + FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); + if (null != folderRegistry) + { + folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); + folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); + folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); + folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); + } + + AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); + + WebdavService.get().addProvider(new ScriptsResourceProvider()); + + SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); + + UsageMetricsService svc = UsageMetricsService.get(); + if (null != svc) + { + svc.registerUsageMetrics(getName(), () -> { + Map results = new HashMap<>(); + + DbSchema schema = ExperimentService.get().getSchema(); + if (AssayService.get() != null) + { + Map assayMetrics = new HashMap<>(); + SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); + SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); + for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) + { + Map protocolMetrics = new HashMap<>(); + + // Run count across all assay designs of this type + SQLFragment runSQL = new SQLFragment(baseRunSQL); + runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); + protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); + + // Number of assay designs of this type + SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); + protocolSQL.add(assayProvider.getProtocolPattern()); + protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); + List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); + protocolMetrics.put("protocolCount", protocols.size()); + + List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); + + protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); + + // Primary implementation class + protocolMetrics.put("implementingClass", assayProvider.getClass()); + + assayMetrics.put(assayProvider.getName(), protocolMetrics); + } + assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + + assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); + SQLFragment runsWithPlateSQL = new SQLFragment(""" + SELECT COUNT(*) FROM exp.experimentrun r + INNER JOIN exp.object o ON o.objectUri = r.lsid + INNER JOIN exp.objectproperty op ON op.objectId = o.objectId + WHERE op.propertyid IN ( + SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? + )"""); + assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); + assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); + + assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file + assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ + SELECT COUNT(*) FROM ( + SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data + WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( + SELECT rowid FROM exp.protocolapplication + WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') + ) + GROUP BY sourceapplicationid + ) x WHERE count > 1""").getObject(Long.class)); + + Map sampleLookupCountMetrics = new HashMap<>(); + SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); + + SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); + sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); + sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( + """ + SELECT COUNT(*) FROM ( + SELECT PD.domainid, COUNT(*) AS PropCount + FROM exp.propertydescriptor D + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) + AND propertyuri LIKE ? + GROUP BY PD.domainid + ) X WHERE X.PropCount > 1""" + ); + resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); + + assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); + + + // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) + results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assay", assayMetrics); + } + + results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); + results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); + + if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries + { + Collection> numSampleCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); + results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( + map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") + ).count()); + } + UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); + FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); + + SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + + " FROM (\n" + + " -- updates that are marked as lineage updates\n" + + " (SELECT DISTINCT transactionId\n" + + " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + + " AND comment = 'Sample was updated.'\n" + + " ) a1\n" + + " JOIN\n" + + " -- but have associated entries that are not lineage updates\n" + + " (SELECT DISTINCT transactionid\n" + + " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + + " ON a1.transactionid = a2.transactionid\n" + + " )"); + + results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); + + results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); + results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); + results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); + results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); + results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); + results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); + results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); + results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); + results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); + + results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + String duplicateCaseInsensitiveSampleNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.material + WHERE materialsourceid IS NOT NULL + GROUP BY LOWER(name), materialsourceid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + String duplicateCaseInsensitiveDataNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.data + WHERE classid IS NOT NULL + GROUP BY LOWER(name), classid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); + results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); + + results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); + results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); + results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + + "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); + if (schema.getSqlDialect().isPostgreSQL()) + { + Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); + results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> + (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); + } + + results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); + results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); + results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); + + results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); + results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + + results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); + + results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); + results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); + + results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); + results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); + + results.putAll(ExperimentService.get().getDomainMetrics()); + + return results; + }); + } + } + + @Override + public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) + { + ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); + service.registerSchemaHandler(handler); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); + DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); + service.registerSchemaHandler(dcHandler); + ExperimentDeleteService.setInstance(dcHandler); + } + + @Override + @NotNull + public Collection getSummary(Container c) + { + Collection list = new LinkedList<>(); + int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); + if (runGroupCount > 0) + list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); + + User user = HttpView.currentContext().getUser(); + + Set runTypes = ExperimentService.get().getExperimentRunTypes(c); + for (ExperimentRunType runType : runTypes) + { + if (runType == ExperimentRunType.ALL_RUNS_TYPE) + continue; + + long runCount = runType.getRunCount(user, c); + if (runCount > 0) + list.add(runCount + " runs of type " + runType.getDescription()); + } + + int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); + if (dataClassCount > 0) + list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); + + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if (sampleTypeCount > 0) + list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); + + return list; + } + + @Override + public @NotNull ArrayList getDetailedSummary(Container c, User user) + { + ArrayList summaries = new ArrayList<>(); + + // Assay types + long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); + if (assayTypeCount > 0) + summaries.add(new Summary(assayTypeCount, "Assay Type")); + + // Run count + int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); + if (runGroupCount > 0) + summaries.add(new Summary(runGroupCount, "Assay run")); + + // Number of Data Classes + List dataClasses = ExperimentService.get().getDataClasses(c, false); + int dataClassCount = dataClasses.size(); + if (dataClassCount > 0) + summaries.add(new Summary(dataClassCount, "Data Class")); + + ExpSchema expSchema = new ExpSchema(user, c); + + // Individual Data Class row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 47919: The "DataCount" column is filtered to only count data in the target container + if (table instanceof ExpDataClassTableImpl tableImpl) + tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpDataClassTable.Column.Name.name()); + columns.add(ExpDataClassTable.Column.DataCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + summaries.add(new Summary(count, entry.getKey())); + } + } + + // Sample Types + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if 
(sampleTypeCount > 0) + summaries.add(new Summary(sampleTypeCount, "Sample Type")); + + // Individual Sample Type row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 51557: The "SampleCount" column is filtered to only count data in the target container + if (table instanceof ExpSampleTypeTableImpl tableImpl) + tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpSampleTypeTable.Column.Name.name()); + columns.add(ExpSampleTypeTable.Column.SampleCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + { + String name = entry.getKey(); + Summary s = name.equals("MixtureBatches") + ? 
new Summary(count, "Batch") + : new Summary(count, name); + summaries.add(s); + } + } + } + + return summaries; + } + + @Override + public @NotNull Set> getIntegrationTests() + { + return Set.of( + DomainImpl.TestCase.class, + DomainPropertyImpl.TestCase.class, + ExpDataTableImpl.TestCase.class, + ExperimentServiceImpl.AuditDomainUriTest.class, + ExperimentServiceImpl.LineageQueryTestCase.class, + ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, + ExperimentServiceImpl.TestCase.class, + ExperimentStressTest.class, + LineagePerfTest.class, + LineageTest.class, + OntologyManager.TestCase.class, + PropertyServiceImpl.TestCase.class, + SampleTypeServiceImpl.TestCase.class, + StorageNameGenerator.TestCase.class, + StorageProvisionerImpl.TestCase.class, + UniqueValueCounterTestCase.class, + XarTestPipelineJob.TestCase.class + ); + } + + @Override + public @NotNull Collection>> getIntegrationTestFactories() + { + List>> list = new ArrayList<>(super.getIntegrationTestFactories()); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); + return list; + } + + @Override + public @NotNull Set> getUnitTests() + { + return Set.of( + GraphAlgorithms.TestCase.class, + LSIDRelativizer.TestCase.class, + Lsid.TestCase.class, + LsidUtils.TestCase.class, + PropertyController.TestCase.class, + Quantity.TestCase.class, + Unit.TestCase.class + ); + } + + @Override + @NotNull + public Collection getSchemaNames() + { + return List.of( + ExpSchema.SCHEMA_NAME, + DataClassDomainKind.PROVISIONED_SCHEMA_NAME, + SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME + ); + } + + @NotNull + @Override + public Collection getProvisionedSchemaNames() + { + return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); + } + + @Override + public JSONObject getPageContextJson(ContainerUser context) + { + JSONObject json = 
super.getPageContextJson(context); + json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); + return json; + } +} diff --git a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java index 8c4a644d3cb..42774eefd05 100644 --- a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java +++ b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java @@ -657,4 +657,115 @@ private static void fillRowId(ExpDataClassImpl dc, Domain domain, DbScope scope) } + /** + * Called from exp-26.004-26.005.sql + * Drop the lsid column from existing provisioned DataClass tables. + */ + @SuppressWarnings("unused") + @DeferredUpgrade + public static void dropProvisionedDataClassLsidColumn(ModuleContext context) + { + if (context.isNewInstall()) + return; + + try (DbScope.Transaction tx = ExperimentService.get().ensureTransaction()) + { + TableInfo source = ExperimentServiceImpl.get().getTinfoDataClass(); + List dataClasses = new TableSelector(source, null, null) + .stream(DataClass.class) + .map(ExpDataClassImpl::new) + .toList(); + + LOG.info("Dropping the lsid column from {} data classes", dataClasses.size()); + + int successCount = 0; + for (ExpDataClassImpl dc : dataClasses) + { + boolean success = dropDataClassLsid(dc); + if (success) + successCount++; + } + + LOG.info("Dropped lsid column from {} of {} data classes successfully.", successCount, dataClasses.size()); + + tx.commit(); + } + } + + private static boolean dropDataClassLsid(ExpDataClassImpl dc) + { + Domain domain = dc.getDomain(); + DataClassDomainKind kind = null; + try + { + kind = (DataClassDomainKind) domain.getDomainKind(); + } + catch (IllegalArgumentException e) + { + // pass + } + if (null == kind || null == kind.getStorageSchemaName()) + return false; + + DbSchema schema = DataClassDomainKind.getSchema(); + + StorageProvisioner.get().ensureStorageTable(domain, kind, 
schema.getScope()); + domain = PropertyService.get().getDomain(domain.getTypeId()); + assert (null != domain && null != domain.getStorageTableName()); + + SchemaTableInfo provisionedTable = schema.getTable(domain.getStorageTableName()); + if (provisionedTable == null) + { + LOG.error("DataClass '" + dc.getName() + "' (" + dc.getRowId() + ") has no provisioned table."); + return false; + } + + String lsidColumnName = "lsid"; + ColumnInfo lsidColumn = provisionedTable.getColumn(FieldKey.fromParts(lsidColumnName)); + if (lsidColumn == null) + { + LOG.info("No lsid column found on table '{}'. Skipping drop.", provisionedTable.getName()); + return false; + } + + Set indicesToRemove = new HashSet<>(); + for (var index : provisionedTable.getAllIndices()) + { + var indexColumns = index.columns(); + if (indexColumns.contains(lsidColumn)) + { + if (indexColumns.size() > 1) + LOG.info("Dropping index '{}' on table '{}' because it contains the lsid column.", index.name(), provisionedTable.getName()); + + indicesToRemove.add(index.name()); + } + } + + if (!indicesToRemove.isEmpty()) + StorageProvisionerImpl.get().dropTableIndices(domain, indicesToRemove); + else + LOG.info("No indices found on table '{}' that contain the lsid column.", provisionedTable.getName()); + + // Remanufacture a property descriptor that matches the original LSID property descriptor. 
+ var spec = new PropertyStorageSpec(lsidColumnName, JdbcType.VARCHAR, 300).setNullable(false); + PropertyDescriptor pd = new PropertyDescriptor(); + pd.setContainer(dc.getContainer()); + pd.setDatabaseDefaultValue(spec.getDefaultValue()); + pd.setName(spec.getName()); + pd.setJdbcType(spec.getJdbcType(), spec.getSize()); + pd.setNullable(spec.isNullable()); + pd.setMvEnabled(spec.isMvEnabled()); + pd.setPropertyURI(DomainUtil.createUniquePropertyURI(domain.getTypeURI(), null, new CaseInsensitiveHashSet())); + pd.setDescription(spec.getDescription()); + pd.setImportAliases(spec.getImportAliases()); + pd.setScale(spec.getSize()); + DomainPropertyImpl dp = new DomainPropertyImpl((DomainImpl) domain, pd); + + LOG.debug("Dropping lsid column from table '{}' for data class '{}' in folder {}.", provisionedTable.getName(), dc.getName(), dc.getContainer().getPath()); + StorageProvisionerImpl.get().dropProperties(domain, Set.of(dp)); + + return true; + } + + } diff --git a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java index 63155e503b2..c83453b6618 100644 --- a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java +++ b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java @@ -100,7 +100,6 @@ public class DataClassDomainKind extends AbstractDomainKind implements ExpDataClassDataTable @@ -167,8 +170,8 @@ public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl ALLOWED_IMPORT_HEADERS; static { DATA_CLASS_ALT_MERGE_KEYS = new HashSet<>(Arrays.asList(Column.ClassId.name(), Name.name())); - DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.LSID.name(), Column.RowId.name())); - ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("name", "description", "flag", "comment", "alias", "datafileurl")); + DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.RowId.name())); + ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("description", 
"flag", "comment", "alias", "datafileurl")); } private Map _vocabularyDomainProviders; @@ -689,7 +692,7 @@ public SQLFragment getFromSQLExpanded(String alias, Set selectedColumn // all columns from dataclass property table except key columns Set pCols = new CaseInsensitiveHashSet(provisioned.getColumnNameSet()); pCols.remove("name"); - pCols.remove("lsid"); + pCols.remove("lsid"); // TODO remove pCols.remove("rowId"); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, pCols); @@ -837,12 +840,6 @@ protected SimpleFilter.FilterClause getContainerFilterClause(ContainerFilter fil // UpdatableTableInfo // - @Override - public @Nullable CaseInsensitiveHashSet skipProperties() - { - return super.skipProperties(); - } - @Nullable @Override public CaseInsensitiveHashMap remapSchemaColumns() @@ -859,8 +856,6 @@ public CaseInsensitiveHashMap remapSchemaColumns() @Override public @Nullable Set getAltMergeKeys(DataIteratorContext context) { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); return DATA_CLASS_ALT_MERGE_KEYS; } @@ -878,19 +873,23 @@ public Set getAltKeysForUpdate() if (context.getInsertOption().allowUpdate) { - boolean isUpdateUsingLsid = context.getInsertOption().updateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid) + if (context.getInsertOption().updateOnly) { - keyColumnNames.add(Column.LSID.name()); + // For UPDATE: prefer RowId, else require Name (with ClassId) + if (colNameMap.containsKey(Column.RowId.name())) + keyColumnNames.add(Column.RowId.name()); + else if (colNameMap.containsKey(Name.name())) + { + keyColumnNames.add(Column.ClassId.name()); + keyColumnNames.add(Name.name()); + } + else + throw new IllegalArgumentException("Either RowId or Name is required to update DataClass Data."); } 
else { - Set altMergeKeys = getAltMergeKeys(context); - if (altMergeKeys != null) - keyColumnNames.addAll(altMergeKeys); + // For MERGE: use merge keys (ClassId + Name) + keyColumnNames.addAll(DATA_CLASS_ALT_MERGE_KEYS); } } @@ -995,7 +994,11 @@ public DataIterator getDataIterator(DataIteratorContext context) if (null == input) return null; // Can happen if context has errors + boolean isMerge = context.getInsertOption() == QueryUpdateService.InsertOption.MERGE; + boolean isUpdate = context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE; + var drop = new CaseInsensitiveHashSet(); + var keysCheck = new CaseInsensitiveHashSet(); for (int i = 1; i <= input.getColumnCount(); i++) { String name = input.getColumnInfo(i).getName(); @@ -1003,18 +1006,44 @@ public DataIterator getDataIterator(DataIteratorContext context) boolean isContainerField = name.equalsIgnoreCase("Container") || name.equalsIgnoreCase("Folder"); if (isContainerField) { - if (context.getInsertOption().updateOnly || !context.isCrossFolderImport()) + if (isUpdate || !context.isCrossFolderImport()) drop.add(name); } + else if (ExpDataTable.Column.Name.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.Name.name()); + } else if (isReservedHeader(name)) + { + if (ExpDataTable.Column.RowId.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.RowId.name()); + if (isUpdate) + continue; + + // While accepting RowId during merge is not our preferred behavior, we want to give users a way + // to opt-in to the old behavior where RowId is accepted and ignored. + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) + { + context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging data. 
Specify only the data name instead.", ExpMaterialTable.Column.RowId.name())); + return null; + } + } + + if (ExpDataTable.Column.LSID.name().equalsIgnoreCase(name)) + keysCheck.add(ExpDataTable.Column.LSID.name()); drop.add(name); + } + else if (Column.ClassId.name().equalsIgnoreCase(name)) drop.add(name); } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) + + if ((isMerge || isUpdate) && keysCheck.size() == 1 && keysCheck.contains(LSID.name())) { - drop.remove("lsid"); - drop.remove("rowid");// keep rowid for audit log + String message = String.format("LSID is no longer accepted as a key for data %s. Specify a RowId or Name instead.", isMerge ? "merge" : "update"); + context.getErrors().addRowError(new ValidationException(message, LSID.name())); + return null; } if (!drop.isEmpty()) @@ -1269,7 +1298,7 @@ public Map> getExistingRows(User user, Container co Set lsids = new HashSet<>(); for (Map dataRow : dataRows.values()) - lsids.add((String) dataRow.get("lsid")); + lsids.add((String) dataRow.get("lsid")); // ? 
List seeds = ExperimentServiceImpl.get().getExpDatasByLSID(lsids); ExperimentServiceImpl.get().addRowsParentsFields(new HashSet<>(seeds), dataRows, user, container); @@ -1381,197 +1410,54 @@ else if (name != null) @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _dataClass.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } - - return result; - - } - - // DataClassDataUpdateService needs to skip Attachment column convert before _update - // TODO: move override when implementing consolidating dataclass update methods - @Override - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException { - if (PropertyType.ATTACHMENT == col.getPropertyType()) - return value; - - if (ALIAS_CONCEPT_URI.equals(col.getConceptURI())) - return value; - - return super.convertColumnValue(col, value, user, c, fileLinkDirPath); + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override - protected TableInfo getTableInfoForConversion() + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException // TODO remove { - // getDBTable() returns exp.data table, which lacks properties fields. 
- // TODO: this method can be removed when implementing consolidating dataclass update methods - return getQueryTable(); + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException + public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException { - // LSID was stripped by super.updateRows() and is needed to insert into the dataclass provisioned table - String lsid = (String)oldRow.get("lsid"); - if (lsid == null) - throw new ValidationException("lsid required to update row"); - - String newName = (String) row.get(Name.name()); - String oldName = (String) oldRow.get(Name.name()); - boolean hasNameChange = !StringUtils.isEmpty(newName) && !newName.equals(oldName); - - // Replace attachment columns with filename and keep AttachmentFiles - Map rowStripped = new CaseInsensitiveHashMap<>(); - Map attachments = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : row.entrySet()) - { - String name = entry.getKey(); - Object value = entry.getValue(); - if (isAttachmentProperty(name)) - { - if (value instanceof AttachmentFile file) - { - if (null != file.getFilename()) - { - rowStripped.put(name, file.getFilename()); - attachments.put(name, value); - } - } - else if (value != null && !StringUtils.isEmpty(String.valueOf(value))) - { - // Issue 53498: string value for attachment field is not allowed - throw new ValidationException("Cannot upload '" + value + "' to Attachment type field '" + name + "'."); - } - else - rowStripped.put(name, value); // if null or empty, remove attachment - } - else - { - rowStripped.put(name, value); - } - } - - for (String vocabularyDomainName : 
getVocabularyDomainProviders().keySet()) - { - DataClassVocabularyProviderProperties fieldVocabularyDomainProvider = getVocabularyDomainProviders().get(vocabularyDomainName); - if (fieldVocabularyDomainProvider != null) - rowStripped.putAll(fieldVocabularyDomainProvider.conceptURIVocabularyDomainProvider().getUpdateRowProperties(user, c, rowStripped, oldRow, getAttachmentParentFactory(), fieldVocabularyDomainProvider.sourceColumnName(), fieldVocabularyDomainProvider.vocabularyDomainName(), getVocabularyDomainProviders().size() > 1)); - } + if (rows == null || rows.isEmpty()) + return Collections.emptyList(); - // update exp.data - Map ret = new CaseInsensitiveHashMap<>(super._update(user, c, rowStripped, oldRow, keys)); + Map finalConfigParameters = configParameters == null ? new HashMap<>() : configParameters; + recordDataIteratorUsed(configParameters); - Integer rowId = (Integer) oldRow.get("RowId"); - if (rowId == null) - throw new ValidationException("RowId required to update row"); - keys = new Object[] {rowId}; - TableInfo t = _dataClassDataTableSupplier.get(); - if (t.getColumnNameSet().stream().anyMatch(rowStripped::containsKey)) - { - ret.putAll(Table.update(user, t, rowStripped, t.getColumn("rowId"), keys, null, Level.DEBUG)); - } + List> results = new ArrayList<>(); + int index = 0; - ExpDataImpl data = null; - if (hasNameChange) + while (index < rows.size()) { - data = ExperimentServiceImpl.get().getExpData(lsid); - ExperimentService.get().addObjectLegacyName(data.getObjectId(), ExperimentServiceImpl.getNamespacePrefix(ExpData.class), oldName, user); - } + // TODO: check for duplicates - // update comment - if (row.containsKey("flag") || row.containsKey("comment")) - { - Object o = row.containsKey("flag") ? 
row.get("flag") : row.get("comment"); - String flag = Objects.toString(o, null); + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - if (data == null) - data = ExperimentServiceImpl.get().getExpData(lsid); - if (data != null) - data.setComment(user, flag); - } + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; - // update aliases - if (row.containsKey("Alias")) - AliasInsertHelper.handleInsertUpdate(getContainer(), user, lsid, ExperimentService.get().getTinfoDataAliasMap(), row.get("Alias")); + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; - // handle attachments - removePreviousAttachments(user, c, row, oldRow); - ret.putAll(attachments); - addAttachments(user, c, ret, lsid); + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); + List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - // search index done in postcommit + if (context.getErrors().hasErrors()) + throw context.getErrors(); - ret.put("RowId", oldRow.get("RowId")); // return rowId for SearchService - ret.put("lsid", lsid); - return ret; - } + if (subRet != null) + results.addAll(subRet); - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - boolean useDib = false; - if (rows != null && !rows.isEmpty() && oldKeys == null) - useDib = rows.get(0).containsKey("lsid"); - - useDib = useDib && hasUniformKeys(rows); - - List> results; - if (useDib) - { - Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; - finalConfigParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - - recordDataIteratorUsed(configParameters); - results = super._updateRowsUsingDIB(user, container, rows, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), extraScriptContext); + // TODO: record partitions } - else - { - results = super.updateRows(user, container, rows, oldKeys, errors, configParameters, extraScriptContext); - DbScope scope = getUserSchema().getDbSchema().getScope(); - scope.addCommitTask(() -> - { - List orderedRowIds = new ArrayList<>(); - for (Map result : results) - { - Long rowId = MapUtils.getLong(result, RowId.name()); - if (rowId != null) - orderedRowIds.add(rowId); - } - Collections.sort(orderedRowIds); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - SearchService.get().defaultTask().getQueue(_dataClass.getContainer(), SearchService.PRIORITY.modified).addRunnable((q) -> - { - for (ExpDataImpl expData : ExperimentServiceImpl.get().getExpDatas(sublist)) - expData.index(q, null); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT); - - /* setup mini dataiterator pipeline to process lineage */ - DataIterator di = _toDataIteratorBuilder("updateRows.lineage", results).getDataIterator(new DataIteratorContext()); - ExpDataIterators.derive(user, container, di, false, _dataClass, true); - } + // summary audit? 
return results; } @@ -1597,58 +1483,12 @@ protected int truncateRows(User user, Container container) return ExperimentServiceImpl.get().truncateDataClass(_dataClass, user, container); } - private void removePreviousAttachments(User user, Container c, Map newRow, Map oldRow) - { - Lsid lsid = new Lsid((String)oldRow.get("LSID")); - - for (Map.Entry entry : newRow.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && oldRow.get(entry.getKey()) != null) - { - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - AttachmentService.get().deleteAttachment(parent, (String) oldRow.get(entry.getKey()), user); - } - } - } - @Override protected Domain getDomain() { return _dataClass.getDomain(); } - private void addAttachments(User user, Container c, Map row, String lsidStr) - { - if (row != null && lsidStr != null) - { - ArrayList attachmentFiles = new ArrayList<>(); - for (Map.Entry entry : row.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && entry.getValue() instanceof AttachmentFile file) - { - if (null != file.getFilename()) - attachmentFiles.add(file); - } - } - - if (!attachmentFiles.isEmpty()) - { - Lsid lsid = new Lsid(lsidStr); - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - try - { - AttachmentService.get().addAttachments(parent, attachmentFiles, user); - } - catch (IOException e) - { - throw UnexpectedException.wrap(e); - } - } - } - } - @Override public void configureDataIteratorContext(DataIteratorContext context) { diff --git a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java index c54d96aeec5..7214de841e8 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java @@ -1,978 +1,985 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.experiment.api; - -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Handler; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.XarFormatException; -import org.labkey.api.exp.XarSource; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.query.ExpDataClassDataTable; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.files.FileContentService; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineJob; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryRowReference; -import org.labkey.api.query.QueryService; -import 
org.labkey.api.query.ValidationException; -import org.labkey.api.search.SearchResultTemplate; -import org.labkey.api.search.SearchScope; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DataClassReadPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HtmlString; -import org.labkey.api.util.LinkBuilder; -import org.labkey.api.util.MimeMap; -import org.labkey.api.util.NetworkDrive; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.URLHelper; -import org.labkey.api.util.InputBuilder; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.NavTree; -import org.labkey.api.view.ViewContext; -import org.labkey.api.webdav.SimpleDocumentResource; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; - -public class ExpDataImpl extends AbstractRunItemImpl implements ExpData -{ - public enum 
DataOperations - { - Edit("editing", UpdatePermission.class), - EditLineage("editing lineage", UpdatePermission.class), - Delete("deleting", DeletePermission.class), - Move("moving", MoveEntitiesPermission.class); - - private final String _description; // used as a suffix in messaging users about what is not allowed - private final Class _permissionClass; - - DataOperations(String description, Class permissionClass) - { - _description = description; - _permissionClass = permissionClass; - } - - public String getDescription() - { - return _description; - } - - public Class getPermissionClass() - { - return _permissionClass; - } - } - - public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, DataClassReadPermission.class); - } - }; - public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, MediaReadPermission.class); - } - }; - - /** Cache this because it can be expensive to recompute */ - private Boolean _finalRunOutput; - - /** - * Temporary mapping until experiment.xml contains the mime type - */ - private static final MimeMap MIME_MAP = new MimeMap(); - - static public List fromDatas(List datas) - { - List ret = new ArrayList<>(datas.size()); - for (Data data : datas) - { - ret.add(new ExpDataImpl(data)); - } - return ret; - } - - // For serialization - protected ExpDataImpl() {} - - public ExpDataImpl(Data data) - { - super(data); - } - - @Override - public void setComment(User user, String comment) throws ValidationException - { - setComment(user, comment, true); - } - - @Override - public void setComment(User user, String comment, 
boolean index) throws ValidationException - { - super.setComment(user, comment); - - if (index) - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - @Nullable - public ActionURL detailsURL() - { - DataType dataType = getDataType(); - if (dataType != null) - { - ActionURL url = dataType.getDetailsURL(this); - if (url != null) - return url; - } - - return _object.detailsURL(); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference() - { - return getQueryRowReference(null); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) - { - ExpDataClassImpl dc = getDataClass(user); - if (dc != null) - return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - - // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace - DataType type = getDataType(); - if (type != null) - { - QueryRowReference queryRowReference = type.getQueryRowReference(this); - if (queryRowReference != null) - return queryRowReference; - } - - return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - } - - @Override - public List getTargetApplications() - { - return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); - } - - @Override - public List getTargetRuns() - { - return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); - } - - @Override - public DataType getDataType() - { - return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); - } - - @Override - public void setDataFileURI(URI uri) - { - ensureUnlocked(); - _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); - } - - @Override - public void save(User user) - { - // Replace 
the default "Data" cpastype if the Data belongs to a DataClass - ExpDataClassImpl dataClass = getDataClass(); - if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) - setCpasType(dataClass.getLSID()); - - boolean isNew = getRowId() == 0; - save(user, ExperimentServiceImpl.get().getTinfoData(), true); - - if (isNew) - { - if (dataClass != null) - { - Map map = new HashMap<>(); - map.put("lsid", getLSID()); - Table.insert(user, dataClass.getTinfo(), map); - } - } - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - protected void save(User user, TableInfo table, boolean ensureObject) - { - assert ensureObject; - super.save(user, table, true); - } - - @Override - public URI getDataFileURI() - { - String url = _object.getDataFileUrl(); - if (url == null) - return null; - try - { - return new URI(_object.getDataFileUrl()); - } - catch (URISyntaxException use) - { - return null; - } - } - - @Override - public ExperimentDataHandler findDataHandler() - { - return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); - } - - @Override - public String getDataFileUrl() - { - return _object.getDataFileUrl(); - } - - @Override - public boolean hasFileScheme() - { - return !FileUtil.hasCloudScheme(getDataFileUrl()); - } - - @Override - @Nullable - public File getFile() - { - return _object.getFile(); - } - - @Override - public @Nullable FileLike getFileLike() - { - return _object.getFileLike(); - } - - @Override - @Nullable - public java.nio.file.Path getFilePath() - { - return _object.getFilePath(); - } - - @Override - public boolean isInlineImage() - { - return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); - } - - @Override - public void delete(User user) - { - delete(user, true); - } - - @Override - public void delete(User user, boolean deleteRunsUsingData) - { - ExperimentServiceImpl.get().deleteDataByRowIds(user, 
getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); - } - - public String getMimeType() - { - if (null != getDataFileUrl()) - return MIME_MAP.getContentTypeFor(getDataFileUrl()); - else - return null; - } - - @Override - public boolean isFileOnDisk() - { - java.nio.file.Path f = getFilePath(); - if (f != null) - if (!FileUtil.hasCloudScheme(f)) - return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); - else - return Files.exists(f); - else - return false; - } - - public boolean isPathAccessible() - { - java.nio.file.Path path = getFilePath(); - return (null != path && Files.exists(path)); - } - - @Override - public String getCpasType() - { - String result = _object.getCpasType(); - if (result != null) - return result; - - ExpDataClass dataClass = getDataClass(); - if (dataClass != null) - return dataClass.getLSID(); - - return ExpData.DEFAULT_CPAS_TYPE; - } - - public void setGenerated(boolean generated) - { - ensureUnlocked(); - _object.setGenerated(generated); - } - - @Override - public boolean isGenerated() - { - return _object.isGenerated(); - } - - @Override - public boolean isFinalRunOutput() - { - if (_finalRunOutput == null) - { - ExpRun run = getRun(); - _finalRunOutput = run != null && run.isFinalOutput(this); - } - return _finalRunOutput.booleanValue(); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass() - { - return getDataClass(null); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass(@Nullable User user) - { - if (_object.getClassId() != null && getContainer() != null) - { - if (user == null) - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); - else - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); - } - - return null; - } - - @Override - public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException - { - String dataFileURL = getDataFileUrl(); - if (dataFileURL == null) - 
return; - - if (xarSource.shouldIgnoreDataFiles()) - { - job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); - return; - } - - job.debug("Trying to load data file " + dataFileURL + " into the system"); - - java.nio.file.Path path = FileUtil.stringToPath(getContainer(), dataFileURL); - - if (!Files.exists(path)) - { - job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); - return; - } - - // Check that the file is under the pipeline root to prevent users from referencing a file that they - // don't have permission to import - PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); - if (!xarSource.allowImport(pr, job.getContainer(), path)) - { - if (pr == null) - { - job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); - return; - } - job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". 
It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); - return; - } - - ExperimentDataHandler handler = findDataHandler(); - try - { - handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); - } - catch (ExperimentException e) - { - throw new XarFormatException(e); - } - - job.debug("Finished trying to load data file " + dataFileURL + " into the system"); - } - - // Get all text and int strings from the data class for indexing - private void getIndexValues( - Map props, - @NotNull ExpDataClassDataTableImpl table, - Set identifiersHi, - Set identifiersMed, - Set identifiersLo, - Set keywordHi, - Set keywordMed, - Set keywordsLo, - JSONObject jsonData - ) - { - CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); - for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) - skipColumns.add(column.name()); - skipColumns.add("Ancestors"); - skipColumns.add("Container"); - - processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); - } - - @Override - @NotNull - public Collection getAliases() - { - TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); - TableInfo ti = ExperimentService.get().getTinfoAlias(); - SQLFragment sql = new SQLFragment() - .append("SELECT a.name FROM ").append(mapTi, "m") - .append(" JOIN ").append(ti, "a") - .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); - sql.add(getLSID()); - ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); - return Collections.unmodifiableList(aliases); - } - - @Override - public String getDocumentId() - { - String dataClassName = "-"; - ExpDataClass dc = getDataClass(); - if (dc != null) - dataClassName = dc.getName(); - // why not just data:rowId? 
- return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); - } - - @Override - public Map getObjectProperties() - { - return getObjectProperties(getDataClass()); - } - - @Override - public Map getObjectProperties(@Nullable User user) - { - return getObjectProperties(getDataClass(user)); - } - - private Map getObjectProperties(ExpDataClassImpl dataClass) - { - HashMap ret = new HashMap<>(super.getObjectProperties()); - var ti = null == dataClass ? null : dataClass.getTinfo(); - if (null != ti) - { - ret.putAll(getObjectProperties(ti)); - } - return ret; - } - - private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) - { - if (resourceIdentifier.startsWith("data:")) - resourceIdentifier = resourceIdentifier.substring("data:".length()); - - Path path = Path.parse(resourceIdentifier); - if (path.size() != 3) - return null; - String containerId = path.get(0); - String dataClassName = path.get(1); - String rowIdString = path.get(2); - - long rowId; - try - { - rowId = Long.parseLong(rowIdString); - if (rowId == 0) - return null; - } - catch (NumberFormatException ex) - { - return null; - } - - Container c = ContainerManager.getForId(containerId); - if (c == null) - return null; - - ExpDataClass dc = null; - if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) - { - String dcKey = containerId + '-' + dataClassName; - dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); - } - - return new Pair<>(rowId, dc); - } - - @Nullable - public static ExpDataImpl fromDocumentId(String resourceIdentifier) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); - if (rowIdDataClass == null) - return null; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - if (dc != null) - return ExperimentServiceImpl.get().getExpData(dc, rowId); - else - return 
ExperimentServiceImpl.get().getExpData(rowId); - } - - @Nullable - public static Map fromDocumentIds(Collection resourceIdentifiers) - { - Map rowIdIdentifierMap = new LongHashMap<>(); - Map dcCache = new HashMap<>(); - Map dcMap = new LongHashMap<>(); - Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass - List rowIds = new ArrayList<>(); // data rowIds without dataClass - for (String resourceIdentifier : resourceIdentifiers) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); - if (rowIdDataClass == null) - continue; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - rowIdIdentifierMap.put(rowId, resourceIdentifier); - - if (dc != null) - { - dcMap.put(dc.getRowId(), dc); - dcRowIdMap - .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) - .add(rowId); - } - else - rowIds.add(rowId); - } - - List expDatas = new ArrayList<>(); - if (!rowIds.isEmpty()) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); - - if (!dcRowIdMap.isEmpty()) - { - for (Long dataClassId : dcRowIdMap.keySet()) - { - ExpDataClass dc = dcMap.get(dataClassId); - if (dc != null) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); - } - } - - Map identifierDatas = new HashMap<>(); - for (ExpData data : expDatas) - { - identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); - } - - return identifierDatas; - } - - @Override - public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) - { - java.nio.file.Path path = getFilePath(); - if (path == null) - { - return null; - } - - Container c = getContainer(); - if (c == null) - { - return null; - } - - return FileContentService.get().getWebDavUrl(path, c, type); - } - - @Override - public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) - { - Container container = getContainer(); - if (container == null) - return null; - - Map props = 
new HashMap<>(); - JSONObject jsonData = new JSONObject(); - Set keywordsHi = new HashSet<>(); - Set keywordsMed = new HashSet<>(); - Set keywordsLo = new HashSet<>(); - - Set identifiersHi = new HashSet<>(); - Set identifiersMed = new HashSet<>(); - Set identifiersLo = new HashSet<>(); - - StringBuilder body = new StringBuilder(); - - // Name is an identifier with the highest weight - identifiersHi.add(getName()); - keywordsMed.add(getName()); // also add to keywords since those are stemmed - - // Description is added as a keywordsLo -- in Biologics it is common for the description to - // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as - // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. - // CONSIDER: tokenize the description and extract identifiers - if (null != getDescription()) - keywordsLo.add(getDescription()); - - String comment = getComment(); - if (comment != null) - keywordsMed.add(comment); - - // Add aliases in parentheses in the title - StringBuilder title = new StringBuilder(getName()); - Collection aliases = getAliases(); - if (!aliases.isEmpty()) - { - title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); - identifiersHi.addAll(aliases); - } - - ExpDataClassImpl dc = getDataClass(User.getSearchUser()); - if (dc != null) - { - ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); - NavTree t = new NavTree(dc.getName(), show); - String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); - props.put(SearchService.PROPERTY.navtrail.toString(), nav); - - props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); - body.append(dc.getName()); - - if (tableInfo == null) - tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); - - if (!(tableInfo instanceof ExpDataClassDataTableImpl 
expDataClassDataTable)) - throw new IllegalArgumentException(String.format("Unable to index data class item in %s. Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); - - if (!expDataClassDataTable.getDataClass().equals(dc)) - throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); - - // Collect other text columns and lookup display columns - getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); - } - - // === Stored, not indexed - if (dc != null && dc.isMedia()) - props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); - else - props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); - props.put(SearchService.PROPERTY.title.toString(), title.toString()); - props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); - - ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); - view.setExtraPath(container.getId()); - String docId = getDocumentId(); - - // Generate a summary explicitly instead of relying on a summary to be extracted - // from the document body. Placing lookup values and the description in the body - // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
- StringBuilder summary = new StringBuilder(); - if (StringUtils.isNotEmpty(getDescription())) - summary.append(getDescription()).append("\n"); - - appendTokens(summary, keywordsMed); - appendTokens(summary, identifiersMed); - appendTokens(summary, identifiersLo); - - props.put(SearchService.PROPERTY.summary.toString(), summary); - - return new ExpDataResource( - getRowId(), - new Path(docId), - docId, - container.getEntityId(), - "text/plain", - body.toString(), - view, - props, - getCreatedBy(), - getCreated(), - getModifiedBy(), - getModified() - ); - } - - private static void appendTokens(StringBuilder sb, Collection toks) - { - if (toks.isEmpty()) - return; - - sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); - } - - private static class ExpDataResource extends SimpleDocumentResource - { - final long _rowId; - - public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) - { - super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); - _rowId = rowId; - } - - @Override - public void setLastIndexed(long ms, long modified) - { - ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); - } - } - - public static class DataSearchResultTemplate implements SearchResultTemplate - { - public static final String NAME = "data"; - public static final String PROPERTY = "dataclass"; - - @Nullable - @Override - public String getName() - { - return NAME; - } - - private ExpDataClass getDataClass() - { - if (HttpView.hasCurrentView()) - { - ViewContext ctx = HttpView.currentContext(); - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); - } - return null; - } - - @Nullable - @Override - public String getCategories() - { - ExpDataClass dataClass = getDataClass(); - - if (dataClass != null && dataClass.isMedia()) - return expMediaDataCategory.getName(); - - return expDataCategory.getName(); - } - - @Nullable - @Override - public SearchScope getSearchScope() - { - return SearchScope.FolderAndSubfolders; - } - - @NotNull - @Override - public String getResultNameSingular() - { - ExpDataClass dc = getDataClass(); - if (dc != null) - return dc.getName(); - return "data"; - } - - @NotNull - @Override - public String getResultNamePlural() - { - return getResultNameSingular(); - } - - @Override - public boolean includeNavigationLinks() - { - return true; - } - - @Override - public boolean includeAdvanceUI() - { - return false; - } - - @Nullable - @Override - public HtmlString getExtraHtml(ViewContext ctx) - { - String q = ctx.getActionURL().getParameter("q"); - - if (StringUtils.isNotBlank(q)) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); - url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); - - StringBuilder html = new StringBuilder(); - html.append("
"); - - appendParam(html, null, dataclass, "All", false, url); - for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) - { - appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); - } - - html.append("
"); - return HtmlString.unsafe(html.toString()); - } - else - { - return null; - } - } - - private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) - { - sb.append(""); - - if (!Objects.equals(dataclass, current)) - { - if (addParam) - url = url.clone().addParameter(PROPERTY, dataclass); - - sb.append(LinkBuilder.simpleLink(label, url)); - } - else - { - sb.append(label); - } - - sb.append(" "); - } - - @Override - public HtmlString getHiddenInputsHtml(ViewContext ctx) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); - } - - return null; - } - - - @Override - public String reviseQuery(ViewContext ctx, String q) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - - if (null != dataclass) - return "+(" + q + ") +" + PROPERTY + ":" + dataclass; - else - return q; - } - - @Override - public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) - { - SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); - - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - String text = root.getText(); - root.setText(text + " - " + dataclass); - } - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.experiment.api; + +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Handler; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.XarFormatException; +import org.labkey.api.exp.XarSource; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataClassDataTable; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.files.FileContentService; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineJob; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryRowReference; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.ValidationException; +import org.labkey.api.search.SearchResultTemplate; +import org.labkey.api.search.SearchScope; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DataClassReadPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HtmlString; +import org.labkey.api.util.LinkBuilder; +import org.labkey.api.util.MimeMap; +import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.URLHelper; +import org.labkey.api.util.InputBuilder; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.NavTree; +import org.labkey.api.view.ViewContext; +import org.labkey.api.webdav.SimpleDocumentResource; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; + +public class ExpDataImpl extends AbstractRunItemImpl implements ExpData +{ + public enum DataOperations + { + Edit("editing", UpdatePermission.class), + EditLineage("editing lineage", UpdatePermission.class), + Delete("deleting", DeletePermission.class), + Move("moving", MoveEntitiesPermission.class); + + private final String _description; // used as a suffix in messaging users 
about what is not allowed + private final Class _permissionClass; + + DataOperations(String description, Class permissionClass) + { + _description = description; + _permissionClass = permissionClass; + } + + public String getDescription() + { + return _description; + } + + public Class getPermissionClass() + { + return _permissionClass; + } + } + + public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, DataClassReadPermission.class); + } + }; + public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, MediaReadPermission.class); + } + }; + + /** Cache this because it can be expensive to recompute */ + private Boolean _finalRunOutput; + + /** + * Temporary mapping until experiment.xml contains the mime type + */ + private static final MimeMap MIME_MAP = new MimeMap(); + + static public List fromDatas(List datas) + { + List ret = new ArrayList<>(datas.size()); + for (Data data : datas) + { + ret.add(new ExpDataImpl(data)); + } + return ret; + } + + // For serialization + protected ExpDataImpl() {} + + public ExpDataImpl(Data data) + { + super(data); + } + + @Override + public void setComment(User user, String comment) throws ValidationException + { + setComment(user, comment, true); + } + + @Override + public void setComment(User user, String comment, boolean index) throws ValidationException + { + super.setComment(user, comment); + + if (index) + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + @Nullable + public ActionURL detailsURL() + { + DataType dataType = 
getDataType(); + if (dataType != null) + { + ActionURL url = dataType.getDetailsURL(this); + if (url != null) + return url; + } + + return _object.detailsURL(); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference() + { + return getQueryRowReference(null); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) + { + ExpDataClassImpl dc = getDataClass(user); + if (dc != null) + return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + + // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace + DataType type = getDataType(); + if (type != null) + { + QueryRowReference queryRowReference = type.getQueryRowReference(this); + if (queryRowReference != null) + return queryRowReference; + } + + return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + } + + @Override + public List getTargetApplications() + { + return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); + } + + @Override + public List getTargetRuns() + { + return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); + } + + @Override + public DataType getDataType() + { + return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); + } + + @Override + public void setDataFileURI(URI uri) + { + ensureUnlocked(); + _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); + } + + @Override + public void save(User user) + { + // Replace the default "Data" cpastype if the Data belongs to a DataClass + ExpDataClassImpl dataClass = getDataClass(); + if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) + setCpasType(dataClass.getLSID()); + + boolean isNew = getRowId() == 0; + save(user, 
ExperimentServiceImpl.get().getTinfoData(), true); + + if (isNew) + { + if (dataClass != null) + { + Map map = new HashMap<>(); + map.put("lsid", getLSID()); + Table.insert(user, dataClass.getTinfo(), map); + } + } + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + protected void save(User user, TableInfo table, boolean ensureObject) + { + assert ensureObject; + super.save(user, table, true); + } + + @Override + public URI getDataFileURI() + { + String url = _object.getDataFileUrl(); + if (url == null) + return null; + try + { + return new URI(_object.getDataFileUrl()); + } + catch (URISyntaxException use) + { + return null; + } + } + + @Override + public ExperimentDataHandler findDataHandler() + { + return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); + } + + @Override + public String getDataFileUrl() + { + return _object.getDataFileUrl(); + } + + @Override + public boolean hasFileScheme() + { + return !FileUtil.hasCloudScheme(getDataFileUrl()); + } + + @Override + @Nullable + public File getFile() + { + return _object.getFile(); + } + + @Override + public @Nullable FileLike getFileLike() + { + return _object.getFileLike(); + } + + @Override + @Nullable + public java.nio.file.Path getFilePath() + { + return _object.getFilePath(); + } + + @Override + public boolean isInlineImage() + { + return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); + } + + @Override + public void delete(User user) + { + delete(user, true); + } + + @Override + public void delete(User user, boolean deleteRunsUsingData) + { + ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); + } + + public String getMimeType() + { + if (null != getDataFileUrl()) + return MIME_MAP.getContentTypeFor(getDataFileUrl()); + else + return null; + } + + @Override + public boolean isFileOnDisk() + { + 
java.nio.file.Path f = getFilePath(); + if (f != null) + if (!FileUtil.hasCloudScheme(f)) + return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); + else + return Files.exists(f); + else + return false; + } + + public boolean isPathAccessible() + { + java.nio.file.Path path = getFilePath(); + return (null != path && Files.exists(path)); + } + + @Override + public String getCpasType() + { + String result = _object.getCpasType(); + if (result != null) + return result; + + ExpDataClass dataClass = getDataClass(); + if (dataClass != null) + return dataClass.getLSID(); + + return ExpData.DEFAULT_CPAS_TYPE; + } + + public void setGenerated(boolean generated) + { + ensureUnlocked(); + _object.setGenerated(generated); + } + + @Override + public boolean isGenerated() + { + return _object.isGenerated(); + } + + @Override + public boolean isFinalRunOutput() + { + if (_finalRunOutput == null) + { + ExpRun run = getRun(); + _finalRunOutput = run != null && run.isFinalOutput(this); + } + return _finalRunOutput.booleanValue(); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass() + { + return getDataClass(null); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass(@Nullable User user) + { + if (_object.getClassId() != null && getContainer() != null) + { + if (user == null) + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); + else + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); + } + + return null; + } + + @Override + public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException + { + String dataFileURL = getDataFileUrl(); + if (dataFileURL == null) + return; + + if (xarSource.shouldIgnoreDataFiles()) + { + job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); + return; + } + + job.debug("Trying to load data file " + dataFileURL + " into the system"); + + java.nio.file.Path path = 
FileUtil.stringToPath(getContainer(), dataFileURL); + + if (!Files.exists(path)) + { + job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); + return; + } + + // Check that the file is under the pipeline root to prevent users from referencing a file that they + // don't have permission to import + PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); + if (!xarSource.allowImport(pr, job.getContainer(), path)) + { + if (pr == null) + { + job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); + return; + } + job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); + return; + } + + ExperimentDataHandler handler = findDataHandler(); + try + { + handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); + } + catch (ExperimentException e) + { + throw new XarFormatException(e); + } + + job.debug("Finished trying to load data file " + dataFileURL + " into the system"); + } + + // Get all text and int strings from the data class for indexing + private void getIndexValues( + Map props, + @NotNull ExpDataClassDataTableImpl table, + Set identifiersHi, + Set identifiersMed, + Set identifiersLo, + Set keywordHi, + Set keywordMed, + Set keywordsLo, + JSONObject jsonData + ) + { + CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); + for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) + skipColumns.add(column.name()); + skipColumns.add("Ancestors"); + skipColumns.add("Container"); + + processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); + } + + @Override + @NotNull + public 
Collection getAliases() + { + TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); + TableInfo ti = ExperimentService.get().getTinfoAlias(); + SQLFragment sql = new SQLFragment() + .append("SELECT a.name FROM ").append(mapTi, "m") + .append(" JOIN ").append(ti, "a") + .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); + sql.add(getLSID()); + ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); + return Collections.unmodifiableList(aliases); + } + + @Override + public String getDocumentId() + { + String dataClassName = "-"; + ExpDataClass dc = getDataClass(); + if (dc != null) + dataClassName = dc.getName(); + // why not just data:rowId? + return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); + } + + @Override + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); + } + + @Override + public Map getObjectProperties() + { + return getObjectProperties(getDataClass()); + } + + @Override + public Map getObjectProperties(@Nullable User user) + { + return getObjectProperties(getDataClass(user)); + } + + private Map getObjectProperties(ExpDataClassImpl dataClass) + { + HashMap ret = new HashMap<>(super.getObjectProperties()); + var ti = null == dataClass ? 
null : dataClass.getTinfo(); + if (null != ti) + { + ret.putAll(getObjectProperties(ti)); + } + return ret; + } + + private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) + { + if (resourceIdentifier.startsWith("data:")) + resourceIdentifier = resourceIdentifier.substring("data:".length()); + + Path path = Path.parse(resourceIdentifier); + if (path.size() != 3) + return null; + String containerId = path.get(0); + String dataClassName = path.get(1); + String rowIdString = path.get(2); + + long rowId; + try + { + rowId = Long.parseLong(rowIdString); + if (rowId == 0) + return null; + } + catch (NumberFormatException ex) + { + return null; + } + + Container c = ContainerManager.getForId(containerId); + if (c == null) + return null; + + ExpDataClass dc = null; + if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) + { + String dcKey = containerId + '-' + dataClassName; + dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); + } + + return new Pair<>(rowId, dc); + } + + @Nullable + public static ExpDataImpl fromDocumentId(String resourceIdentifier) + { + Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); + if (rowIdDataClass == null) + return null; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + if (dc != null) + return ExperimentServiceImpl.get().getExpData(dc, rowId); + else + return ExperimentServiceImpl.get().getExpData(rowId); + } + + @Nullable + public static Map fromDocumentIds(Collection resourceIdentifiers) + { + Map rowIdIdentifierMap = new LongHashMap<>(); + Map dcCache = new HashMap<>(); + Map dcMap = new LongHashMap<>(); + Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass + List rowIds = new ArrayList<>(); // data rowIds without dataClass + for (String resourceIdentifier : resourceIdentifiers) + { + Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); + if (rowIdDataClass == null) + continue; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + rowIdIdentifierMap.put(rowId, resourceIdentifier); + + if (dc != null) + { + dcMap.put(dc.getRowId(), dc); + dcRowIdMap + .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) + .add(rowId); + } + else + rowIds.add(rowId); + } + + List expDatas = new ArrayList<>(); + if (!rowIds.isEmpty()) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); + + if (!dcRowIdMap.isEmpty()) + { + for (Long dataClassId : dcRowIdMap.keySet()) + { + ExpDataClass dc = dcMap.get(dataClassId); + if (dc != null) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); + } + } + + Map identifierDatas = new HashMap<>(); + for (ExpData data : expDatas) + { + identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); + } + + return identifierDatas; + } + + @Override + public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) + { + java.nio.file.Path path = getFilePath(); + if (path == null) + { + return null; + } + + Container c = getContainer(); + if (c == null) + { + return null; + } + + return FileContentService.get().getWebDavUrl(path, c, type); + } + + @Override + public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) + { + Container container = getContainer(); + if (container == null) + return null; + + Map props = new HashMap<>(); + JSONObject jsonData = new JSONObject(); + Set keywordsHi = new HashSet<>(); + Set keywordsMed = new HashSet<>(); + Set keywordsLo = new HashSet<>(); + + Set identifiersHi = new HashSet<>(); + Set identifiersMed = new HashSet<>(); + Set identifiersLo = new HashSet<>(); + + StringBuilder body = new StringBuilder(); + + // Name is an identifier with the highest weight + identifiersHi.add(getName()); + keywordsMed.add(getName()); // also add to keywords since 
those are stemmed + + // Description is added as a keywordsLo -- in Biologics it is common for the description to + // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as + // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. + // CONSIDER: tokenize the description and extract identifiers + if (null != getDescription()) + keywordsLo.add(getDescription()); + + String comment = getComment(); + if (comment != null) + keywordsMed.add(comment); + + // Add aliases in parentheses in the title + StringBuilder title = new StringBuilder(getName()); + Collection aliases = getAliases(); + if (!aliases.isEmpty()) + { + title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); + identifiersHi.addAll(aliases); + } + + ExpDataClassImpl dc = getDataClass(User.getSearchUser()); + if (dc != null) + { + ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); + NavTree t = new NavTree(dc.getName(), show); + String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); + props.put(SearchService.PROPERTY.navtrail.toString(), nav); + + props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); + body.append(dc.getName()); + + if (tableInfo == null) + tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); + + if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) + throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); + + if (!expDataClassDataTable.getDataClass().equals(dc)) + throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); + + // Collect other text columns and lookup display columns + getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); + } + + // === Stored, not indexed + if (dc != null && dc.isMedia()) + props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); + else + props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); + props.put(SearchService.PROPERTY.title.toString(), title.toString()); + props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); + + ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); + view.setExtraPath(container.getId()); + String docId = getDocumentId(); + + // Generate a summary explicitly instead of relying on a summary to be extracted + // from the document body. Placing lookup values and the description in the body + // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
+ StringBuilder summary = new StringBuilder(); + if (StringUtils.isNotEmpty(getDescription())) + summary.append(getDescription()).append("\n"); + + appendTokens(summary, keywordsMed); + appendTokens(summary, identifiersMed); + appendTokens(summary, identifiersLo); + + props.put(SearchService.PROPERTY.summary.toString(), summary); + + return new ExpDataResource( + getRowId(), + new Path(docId), + docId, + container.getEntityId(), + "text/plain", + body.toString(), + view, + props, + getCreatedBy(), + getCreated(), + getModifiedBy(), + getModified() + ); + } + + private static void appendTokens(StringBuilder sb, Collection toks) + { + if (toks.isEmpty()) + return; + + sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); + } + + private static class ExpDataResource extends SimpleDocumentResource + { + final long _rowId; + + public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) + { + super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); + _rowId = rowId; + } + + @Override + public void setLastIndexed(long ms, long modified) + { + ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); + } + } + + public static class DataSearchResultTemplate implements SearchResultTemplate + { + public static final String NAME = "data"; + public static final String PROPERTY = "dataclass"; + + @Nullable + @Override + public String getName() + { + return NAME; + } + + private ExpDataClass getDataClass() + { + if (HttpView.hasCurrentView()) + { + ViewContext ctx = HttpView.currentContext(); + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); + } + return null; + } + + @Nullable + @Override + public String getCategories() + { + ExpDataClass dataClass = getDataClass(); + + if (dataClass != null && dataClass.isMedia()) + return expMediaDataCategory.getName(); + + return expDataCategory.getName(); + } + + @Nullable + @Override + public SearchScope getSearchScope() + { + return SearchScope.FolderAndSubfolders; + } + + @NotNull + @Override + public String getResultNameSingular() + { + ExpDataClass dc = getDataClass(); + if (dc != null) + return dc.getName(); + return "data"; + } + + @NotNull + @Override + public String getResultNamePlural() + { + return getResultNameSingular(); + } + + @Override + public boolean includeNavigationLinks() + { + return true; + } + + @Override + public boolean includeAdvanceUI() + { + return false; + } + + @Nullable + @Override + public HtmlString getExtraHtml(ViewContext ctx) + { + String q = ctx.getActionURL().getParameter("q"); + + if (StringUtils.isNotBlank(q)) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); + url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); + + StringBuilder html = new StringBuilder(); + html.append("
"); + + appendParam(html, null, dataclass, "All", false, url); + for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) + { + appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); + } + + html.append("
"); + return HtmlString.unsafe(html.toString()); + } + else + { + return null; + } + } + + private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) + { + sb.append(""); + + if (!Objects.equals(dataclass, current)) + { + if (addParam) + url = url.clone().addParameter(PROPERTY, dataclass); + + sb.append(LinkBuilder.simpleLink(label, url)); + } + else + { + sb.append(label); + } + + sb.append(" "); + } + + @Override + public HtmlString getHiddenInputsHtml(ViewContext ctx) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); + } + + return null; + } + + + @Override + public String reviseQuery(ViewContext ctx, String q) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + + if (null != dataclass) + return "+(" + q + ") +" + PROPERTY + ":" + dataclass; + else + return q; + } + + @Override + public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) + { + SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); + + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + String text = root.getText(); + root.setText(text + " - " + dataclass); + } + } + } +} diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index bd8273f8d99..9a54d4d0246 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -161,8 +161,6 @@ public class SampleTypeUpdateServiceDI extends DefaultQueryUpdateService public static final String ROOT_RECOMPUTE_ROWID_SET = "RootIdToRecomputeSet"; public static final String PARENT_RECOMPUTE_NAME_SET = 
"ParentNameToRecomputeSet"; - public static final String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; - public static final Map SAMPLE_ALT_IMPORT_NAME_COLS; private static final Map ALIQUOT_ROLLUP_FIELDS = Map.of( @@ -629,67 +627,6 @@ private void checkPartitionForDuplicates(List> partitionRows } } - /** - * Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion - */ - @Deprecated - @Override - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - Object unitsVal = null; - ColumnInfo unitsCol = null; - Object amountVal = null; - ColumnInfo amountCol = null; - if (row.containsKey(Units.name())) - { - unitsVal = row.get(Units.name()); - unitsCol = columnMap.get(Units.name()); - } - for (String colName : StoredAmount.namesAndLabels()) - { - if (row.containsKey(colName)) - { - amountVal = row.get(colName); - amountCol = columnMap.get(colName); - break; - } - } - if (amountVal != null) - { - String unitsStr = ""; - if (unitsVal != null) - unitsStr = " " + unitsVal; - - providedValues.put(PROVIDED_DATA_PREFIX + StoredAmount.label(), amountVal + unitsStr); - } - - Unit baseUnit = _sampleType != null ? _sampleType.getBaseUnit() : null; - - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - - Object value = entry.getValue(); - if (col != null && col == unitsCol) - { - value = _SamplesCoerceDataIterator.SampleUnitsConvertColumn.getValue(unitsVal, amountVal, amountCol != null, baseUnit, _sampleType == null ? 
null : _sampleType.getName()); - } - else if (col != null && col == amountCol) - { - value = _SamplesCoerceDataIterator.SampleAmountConvertColumn.getValue(amountVal, unitsCol != null, unitsVal, baseUnit, _sampleType == null ? null : _sampleType.getName()); - } - else - { - value = coerceTypesValue(col, providedValues, entry.getKey(), value); - } - - result.put(entry.getKey(), value); - } - return result; - } - @Override public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws BatchValidationException @@ -835,29 +772,9 @@ public static boolean isAliquotStatusChangeNeedRecalc(Collection available @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException { - if (row.containsKey(LSID.name()) && !(row.containsKey(RowId.name()) || row.containsKey(Name.name()))) - throw new ValidationException("Either RowId or Name is required to update a sample."); - - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _sampleType.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } + throw new UnsupportedOperationException("_update() is no longer supported for samples"); - return result; } @Override @@ -1447,7 +1364,7 @@ public DataIterator getDataIterator(DataIteratorContext context) // While accepting RowId during merge is not our preferred behavior, we want to give users a way // to opt-in to the old behavior where RowId is accepted and ignored. 
- if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE)) + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) { context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging samples. Specify only the sample name instead.", RowId.name())); return null; @@ -1466,8 +1383,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return null; } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - drop.remove(LSID.name()); if (!drop.isEmpty()) di = new DropColumnsDataIterator(di, drop); @@ -1652,18 +1567,13 @@ static class _GenerateNamesDataIterator extends SimpleTranslator _nameState = sampleType.getNameGenState(skipDuplicateCheck, true, _container, user); lsidBuilder = sampleType.generateSampleLSID(); - boolean useLsidForUpdate = context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - if (useLsidForUpdate) - selectAll(CaseInsensitiveHashSet.of(Name.name(), RootMaterialRowId.name())); - else - selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); + selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); addColumn(new BaseColumnInfo(Name.fieldKey(), JdbcType.VARCHAR), (Supplier)() -> generatedName); - if (!useLsidForUpdate) - { - DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); - addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); - } + + DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); + addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + addColumn(new BaseColumnInfo(CpasType.fieldKey(), 
JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); } diff --git a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java index 0b446196ae0..d089d6c2bc9 100644 --- a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java @@ -309,7 +309,6 @@ protected void importTsvData(FolderImportContext ctx, XarContext xarContext, Str options.put(SampleTypeService.ConfigParameters.DeferAliquotRuns, true); if (isUpdate) options.put(QueryUpdateService.ConfigParameters.SkipRequiredFieldValidation, true); - options.put(ExperimentService.QueryOptions.UseLsidForUpdate, !isUpdate); options.put(ExperimentService.QueryOptions.DeferRequiredLineageValidation, true); context.setConfigParameters(options); From 09dd6b06f3a9a15522644f9fa26cb462e3e53e76 Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 20:03:52 -0700 Subject: [PATCH 2/7] Enable upgrade script --- .../api/query/AbstractQueryUpdateService.java | 99 +++++++++++++++++++ .../postgresql/exp-26.004-26.005.sql | 1 + .../dbscripts/sqlserver/exp-26.004-26.005.sql | 1 + .../test/integration/DataClassCrud.ispec.ts | 17 +++- .../labkey/experiment/ExperimentModule.java | 2 +- .../api/ExpDataClassDataTableImpl.java | 39 ++------ .../api/SampleTypeUpdateServiceDI.java | 81 +-------------- 7 files changed, 131 insertions(+), 109 deletions(-) create mode 100644 experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql create mode 100644 experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 
4972b62bd17..07c52e39623 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -16,6 +16,7 @@ package org.labkey.api.query; import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.jetbrains.annotations.NotNull; @@ -122,6 +123,8 @@ import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; import static org.labkey.api.files.FileContentService.UPLOADED_FILE; import static org.labkey.api.util.FileUtil.toFileForRead; import static org.labkey.api.util.FileUtil.toFileForWrite; @@ -903,6 +906,102 @@ public List> updateRows(User user, Container container, List return result; } + protected void validatePartitionedRowKeys(Collection columns) + { + // do nothing + } + + public List> updateRowsUsingPartitionedDIB( + DbScope.Transaction tx, + User user, + Container container, + List> rows, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) + { + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) + { + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + + validatePartitionedRowKeys(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> 
rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + + // we need to throw if we don't want executeWithRetry() attempt commit() + if (context.getErrors().hasErrors()) + throw new DbScope.RetryPassthroughException(context.getErrors()); + + if (subRet != null) + { + ret.addAll(subRet); + + // Check if duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? 
ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) + { + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); + } + + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); + + return ret; + } + + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException { if (pkVals == null) diff --git a/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql b/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql new file mode 100644 index 00000000000..dd609550c06 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql @@ -0,0 +1 @@ +SELECT core.executeJavaUpgradeCode('dropProvisionedDataClassLsidColumn'); diff --git a/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql new file mode 100644 index 00000000000..5f8b8c8ae76 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql @@ -0,0 +1 @@ +EXEC core.executeJavaUpgradeCode 'dropProvisionedDataClassLsidColumn'; diff --git 
a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index afc9cfef66d..f84394137b7 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -459,7 +459,6 @@ describe('Duplicate IDs', () => { }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { errorResp = JSON.parse(result.text); - expect(errorResp).toBe('a'); expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); }); @@ -472,7 +471,7 @@ describe('Duplicate IDs', () => { rowId: data1RowId },{ description: 'update', - name: data2RowId + name: dataName2 },{ description: 'update', rowId: data1RowId @@ -953,6 +952,20 @@ describe('Data CRUD', () => { expect(caseInsensitive(row3, 'description')).toBe('mixedVal3 desc'); expect(caseInsensitive(row3, fieldName)).toBe('val3'); // fieldName value should not be updated for row3 + // update using name as key, should succeed, verify update is successful and data are updated correctly + await ExperimentCRUDUtils.updateRows(server, [ + { name: dataName1, description: 'updByName1', [fieldName]: 'nameVal1' }, + { name: 'mixed_rename2', description: 'updByName2', [fieldName]: 'nameVal2' }, + ], 'exp.data', dataType, topFolderOptions, editorUserOptions); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('updByName1'); + expect(caseInsensitive(row1, fieldName)).toBe('nameVal1'); + expect(caseInsensitive(row2, 'description')).toBe('updByName2'); + expect(caseInsensitive(row2, fieldName)).toBe('nameVal2'); + // update names of both rows using lsid (ignored) and rowId as key, verify update is successful and names are updated correctly const newName1 = 'RenamedByLsid1'; const
newName2 = 'RenamedByLsid2'; diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 90d22e0091a..9eb5e1808e3 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -205,7 +205,7 @@ public String getName() @Override public Double getSchemaVersion() { - return 26.004; + return 26.005; } @Nullable diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index f1bb75b8fed..2bffcbdb2b9 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -692,7 +692,6 @@ public SQLFragment getFromSQLExpanded(String alias, Set selectedColumn // all columns from dataclass property table except key columns Set pCols = new CaseInsensitiveHashSet(provisioned.getColumnNameSet()); pCols.remove("name"); - pCols.remove("lsid"); // TODO remove pCols.remove("rowId"); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, pCols); @@ -1415,7 +1414,7 @@ protected Map updateRow(User user, Container container, Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException // TODO remove + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) { throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @@ -1426,38 +1425,20 @@ public List> updateRows(User user, Container container, List if (rows == null || rows.isEmpty()) return Collections.emptyList(); + List> results; Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; recordDataIteratorUsed(configParameters); - List> results = new ArrayList<>(); - int index = 0; - - while (index < rows.size()) + try { - // TODO: check for duplicates - - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - if (context.getErrors().hasErrors()) - throw context.getErrors(); - - if (subRet != null) - results.addAll(subRet); - - // TODO: record partitions + results = getSchema().getScope().executeWithRetry(tx -> + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); + } + catch (DbScope.RetryPassthroughException retryException) + { + retryException.rethrow(BatchValidationException.class); + throw retryException.throwRuntimeException(); } - - // summary audit? 
return results; } diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 9a54d4d0246..9deaccfce62 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -531,66 +531,7 @@ public List> updateRows( try { results = getSchema().getDbSchema().getScope().executeWithRetry(tx -> - { - int index = 0; - int numPartitions = 0; - List> ret = new ArrayList<>(); - - Set observedRowIds = new HashSet<>(); - Set observedNames = new CaseInsensitiveHashSet(); - - while (index < rows.size()) - { - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - confirmAmountAndUnitsColumns(rowKeys); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - numPartitions++; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - - // skip audit summary for the partitions, we will perform it once at the end - context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); - - List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - // we need to throw if we don't want executeWithRetry() attempt commit() - if (context.getErrors().hasErrors()) - throw new DbScope.RetryPassthroughException(context.getErrors()); - - if (subRet != null) - { - ret.addAll(subRet); - - // Check if duplicate rows have been processed across the partitions - // Only start checking for duplicates after the first partition has been processed. 
- if (numPartitions > 1) - { - // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition - checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); - } - - if (errors.hasErrors()) - throw new DbScope.RetryPassthroughException(errors); - } - } - - if (numPartitions > 1) - { - var auditEvent = tx.getAuditEvent(); - if (auditEvent != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); - } - - _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), ret.size()); - - return ret; - }); + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); } catch (DbScope.RetryPassthroughException retryException) { @@ -607,24 +548,10 @@ public List> updateRows( return results; } - private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + @Override + protected void validatePartitionedRowKeys(Collection columns) { - for (Map row : partitionRows) - { - Long rowId = MapUtils.getLong(row, RowId.name()); - if (rowId != null && !globalRowIds.add(rowId)) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); - return; - } - - Object nameObj = row.get(Name.name()); - if (nameObj != null && !globalNames.add(nameObj.toString())) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); - return; - } - } + confirmAmountAndUnitsColumns(columns); } @Override From b5e2ac2002fdbc694f9a7512777e838e1469633a Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 20:05:58 -0700 Subject: [PATCH 3/7] CRLF --- .../labkey/api/exp/query/ExpDataTable.java | 178 +- .../api/query/AbstractQueryUpdateService.java | 3318 ++++++++--------- .../api/query/DefaultQueryUpdateService.java | 1872 +++++----- 
.../labkey/experiment/ExperimentModule.java | 2346 ++++++------ .../labkey/experiment/api/ExpDataImpl.java | 1970 +++++----- 5 files changed, 4842 insertions(+), 4842 deletions(-) diff --git a/api/src/org/labkey/api/exp/query/ExpDataTable.java b/api/src/org/labkey/api/exp/query/ExpDataTable.java index 99f42cb5b27..c18e3a8e7bd 100644 --- a/api/src/org/labkey/api/exp/query/ExpDataTable.java +++ b/api/src/org/labkey/api/exp/query/ExpDataTable.java @@ -1,89 +1,89 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.api.exp.query; - -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.query.FieldKey; - -public interface ExpDataTable extends ExpTable -{ - enum Column - { - Alias, - ContentLink, - ClassId, // database table only - CpasType, // database table only - Created, - CreatedBy, - DataClass, - DataFileUrl, - Description, - DownloadLink, - FileExtension, - FileExists, - FileSize, - Flag, - Folder, - Generated, - InlineThumbnail, - Inputs, - LastIndexed, - LSID, - Modified, - ModifiedBy, - Name, - ObjectId, // database table only - Outputs, - Properties, - Protocol, - ReferenceCount, - Run, - RunApplication, - RunApplicationOutput, - RunId, // database table only - RowId, - SourceApplicationId, // database table only - SourceApplicationInput, - SourceProtocolApplication, - Thumbnail, - ViewFileLink, - ViewOrDownload, - WebDavUrl, - WebDavUrlRelative; - - public FieldKey fieldKey() - { - return FieldKey.fromParts(name()); - } - } - - void setExperiment(ExpExperiment experiment); - ExpExperiment getExperiment(); - void setRun(ExpRun run); - ExpRun getRun(); - - void setDataType(DataType type); - DataType getDataType(); - - MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); - MutableColumnInfo addDataInputColumn(String alias, String role); - MutableColumnInfo addInputRunCountColumn(String alias); -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.api.exp.query; + +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.query.FieldKey; + +public interface ExpDataTable extends ExpTable +{ + enum Column + { + Alias, + ContentLink, + ClassId, // database table only + CpasType, // database table only + Created, + CreatedBy, + DataClass, + DataFileUrl, + Description, + DownloadLink, + FileExtension, + FileExists, + FileSize, + Flag, + Folder, + Generated, + InlineThumbnail, + Inputs, + LastIndexed, + LSID, + Modified, + ModifiedBy, + Name, + ObjectId, // database table only + Outputs, + Properties, + Protocol, + ReferenceCount, + Run, + RunApplication, + RunApplicationOutput, + RunId, // database table only + RowId, + SourceApplicationId, // database table only + SourceApplicationInput, + SourceProtocolApplication, + Thumbnail, + ViewFileLink, + ViewOrDownload, + WebDavUrl, + WebDavUrlRelative; + + public FieldKey fieldKey() + { + return FieldKey.fromParts(name()); + } + } + + void setExperiment(ExpExperiment experiment); + ExpExperiment getExperiment(); + void setRun(ExpRun run); + ExpRun getRun(); + + void setDataType(DataType type); + DataType getDataType(); + + MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); + MutableColumnInfo addDataInputColumn(String alias, String role); + MutableColumnInfo 
addInputRunCountColumn(String alias); +} diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 07c52e39623..77bb1032fd1 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -1,1659 +1,1659 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.labkey.api.assay.AssayFileWriter; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.attachments.AttachmentParentFactory; -import org.labkey.api.attachments.SpringAttachmentFile; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.audit.provider.FileSystemAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; 
-import org.labkey.api.collections.Sets; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DbSequenceManager; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.MultiValuedForeignKey; -import org.labkey.api.data.PropertyStorageSpec; -import org.labkey.api.data.RuntimeSQLException; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.dataiterator.AttachmentDataIterator; -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; -import org.labkey.api.dataiterator.ExistingRecordDataIterator; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.dataiterator.Pump; -import org.labkey.api.dataiterator.StandardDataIteratorBuilder; -import org.labkey.api.dataiterator.TriggerDataBuilderHelper; -import org.labkey.api.dataiterator.WrapperDataIterator; -import org.labkey.api.exceptions.OptimisticConflictException; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.list.ListDefinition; -import org.labkey.api.exp.list.ListService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.files.FileContentService; -import 
org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.reader.TabLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.AdminPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.test.TestWhen; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JunitUtil; -import org.labkey.api.util.TestContext; -import org.labkey.api.util.URIUtil; -import org.labkey.api.view.NotFoundException; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.api.writer.VirtualFile; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.File; -import java.io.IOException; -import java.io.StringReader; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.function.Function; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; -import static 
org.labkey.api.exp.query.ExpMaterialTable.Column.Name; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; -import static org.labkey.api.files.FileContentService.UPLOADED_FILE; -import static org.labkey.api.util.FileUtil.toFileForRead; -import static org.labkey.api.util.FileUtil.toFileForWrite; - -public abstract class AbstractQueryUpdateService implements QueryUpdateService -{ - protected final TableInfo _queryTable; - - private boolean _bulkLoad = false; - private CaseInsensitiveHashMap _columnImportMap = null; - private VirtualFile _att = null; - - /* AbstractQueryUpdateService is generally responsible for some shared functionality - * - triggers - * - coercion/validation - * - detailed logging - * - attachments - * - * If a subclass wants to disable some of these features (w/o subclassing), put flags here... - */ - protected boolean _enableExistingRecordsDataIterator = true; - protected Set _previouslyUpdatedRows = new HashSet<>(); - - protected AbstractQueryUpdateService(TableInfo queryTable) - { - if (queryTable == null) - throw new IllegalArgumentException(); - _queryTable = queryTable; - } - - protected TableInfo getQueryTable() - { - return _queryTable; - } - - public @NotNull Set getPreviouslyUpdatedRows() - { - return _previouslyUpdatedRows == null ? 
new HashSet<>() : _previouslyUpdatedRows; - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) - { - return getQueryTable().hasPermission(user, acl); - } - - protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - return getRow(user, container, keys); - } - - protected abstract Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException; - - @Override - public List> getRows(User user, Container container, List> keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - List> result = new ArrayList<>(); - for (Map rowKeys : keys) - { - Map row = getRow(user, container, rowKeys); - if (row != null) - result.add(row); - } - return result; - } - - @Override - public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - Map> result = new LinkedHashMap<>(); - for (Map.Entry> key : keys.entrySet()) - { - Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); - if (row != null && !row.isEmpty()) - { - result.put(key.getKey(), row); - if (verifyNoCrossFolderData) - { - String dataContainer = (String) row.get("container"); - if (StringUtils.isEmpty(dataContainer)) - dataContainer = (String) row.get("folder"); - if (!container.getId().equals(dataContainer)) - throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + 
key.getValue().values()); - } - } - else if (verifyExisting) - throw new InvalidKeyException("Data not found for " + key.getValue().values()); - } - return result; - } - - @Override - public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) - { - return false; - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) - { - return createTransactionAuditEvent(container, auditAction, null); - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) - { - long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); - TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); - if (details != null) - event.addDetails(details); - return event; - } - - public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) - { - UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); - - if (schema != null) - { - // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the - // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the - // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the - // table. 
- schema.getTable(auditEvent.getEventType(), false); - - transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); - - transaction.setAuditEvent(auditEvent); - } - } - - protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) - { - if (null == errors) - errors = new BatchValidationException(); - DataIteratorContext context = new DataIteratorContext(errors); - context.setInsertOption(forImport); - context.setConfigParameters(configParameters); - configureDataIteratorContext(context); - recordDataIteratorUsed(configParameters); - - return context; - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters == null) - return; - - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } - catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - - /** - * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. - * Used only for generating ExistingRecordDataIterator at the moment. - */ - protected Set getSelectKeys(DataIteratorContext context) - { - if (!context.getAlternateKeys().isEmpty()) - return context.getAlternateKeys(); - return null; - } - - /* - * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. - * does NOT handle triggers or the insert/update iterator. 
- */ - public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) - { - DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); - - if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) - { - // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) - dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); - } - - dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); - dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); - dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); - return dib; - } - - - /** - * Implementation to use insertRows() while we migrate to using DIB for all code paths - *

- * DataIterator should/must use the same error collection as passed in - */ - @Deprecated - protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) - { - MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); - List> list = new ArrayList<>(); - List> ret; - Exception rowException; - - try - { - while (mapIterator.next()) - list.add(mapIterator.getMap()); - ret = insertRows(user, container, list, errors, null, extraScriptContext); - if (errors.hasErrors()) - return 0; - return ret.size(); - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - return 0; - } - catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) - { - rowException = x; - } - finally - { - DataIteratorUtil.closeQuietly(mapIterator); - } - errors.addRowError(new ValidationException(rowException.getMessage())); - return 0; - } - - protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) - { - return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); - } - - protected boolean hasInsertRowsPermission(User user) - { - return hasPermission(user, InsertPermission.class); - } - - protected boolean hasDeleteRowsPermission(User user) - { - return hasPermission(user, DeletePermission.class); - } - - protected boolean hasUpdateRowsPermission(User user) - { - return hasPermission(user, UpdatePermission.class); - } - - // override this - protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) - { - } - - protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasImportRowsPermission(user, container, context)) - throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) - assert(getQueryTable().supportsInsertOption(context.getInsertOption())); - - context.getErrors().setExtraContext(extraScriptContext); - if (extraScriptContext != null) - { - context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); - } - - preImportDIBValidation(in, null); - - boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); - boolean hasTableScript = hasTableScript(container); - TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); - if (!skipTriggers) - { - in = preTriggerDataIterator(in, context); - if (hasTableScript) - in = helper.before(in); - } - DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); - DataIteratorBuilder out = importDIB; - - if 
(!skipTriggers) - { - if (hasTableScript) - out = helper.after(importDIB); - - out = postTriggerDataIterator(out, context); - } - - if (hasTableScript) - { - context.setFailFast(false); - context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); - } - int count = _pump(out, outputRows, context); - - if (context.getErrors().hasErrors()) - return 0; - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) - _addSummaryAuditEvent(container, user, context, count); - - return count; - } - - protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) - { - return in; - } - - protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) - { - return out; - } - - /** this is extracted so subclasses can add wrap */ - protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) - { - DataIterator it = etl.getDataIterator(context); - - if (null == it) - return 0; - - try - { - if (null != rows) - { - MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); - it = new WrapperDataIterator(maps) - { - @Override - public boolean next() throws BatchValidationException - { - boolean ret = super.next(); - if (ret) - rows.add(((MapDataIterator)_delegate).getMap()); - return ret; - } - }; - } - - Pump pump = new Pump(it, context); - pump.run(); - - return pump.getRowCount(); - } - finally - { - DataIteratorUtil.closeQuietly(it); - } - } - - /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ - protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) - { - afterInsertUpdate(count, errors); - } - - protected void afterInsertUpdate(int count, BatchValidationException errors) - {} - - @Override - public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - 
return loadRows(user, container, rows, null, context, extraScriptContext); - } - - public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - configureDataIteratorContext(context); - int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - return count; - } - - @Override - public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) - { - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); - int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); - afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); - return count; - } - - @Override - public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - { - throw new UnsupportedOperationException("merge is not supported for all tables"); - } - - private boolean hasTableScript(Container container) - { - return getQueryTable().hasTriggers(container); - } - - - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); - } - - - protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); - ArrayList> outputRows = new ArrayList<>(); - int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - - if (context.getErrors().hasErrors()) - return null; - - return outputRows; - } - - // not yet supported - protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - - protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) - { - // TODO probably can't assume all rows have all columns - // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) - // TODO optimize ArrayListMap? 
- Set colNames; - - if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) - { - colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); - } - else - { - // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet - colNames = Sets.newCaseInsensitiveHashSet(); - for (Map row : rows) - colNames.addAll(row.keySet()); - } - - preImportDIBValidation(null, colNames); - return MapDataIterator.of(colNames, rows, debugName); - } - - - /** @deprecated switch to using DIB based method */ - @Deprecated - protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) - throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - boolean hasTableScript = hasTableScript(container); - - errors.setExtraContext(extraScriptContext); - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - row = normalizeColumnNames(row); - try - { - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), false); - if (hasTableScript) - { - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); - } - row = insertRow(user, container, row); - if (row == null) - continue; - - if (hasTableScript) - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); - result.add(row); - } - catch (SQLException sqlx) - { - if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) - { - ValidationException vex = new ValidationException(sqlx.getMessage()); - vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); - errors.addRowError(vex); - } - else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) - { - // if we already have some errors, just break - break; - } - else - { - throw sqlx; - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - } - - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); - - return result; - } - - protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) - { - if (!isBulkLoad()) - { - AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; - String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); - getQueryTable().getAuditHandler(auditBehavior) - .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); - } - } - - private Map normalizeColumnNames(Map row) - { - if(_columnImportMap == null) - { - _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); - } - - Map newRow = new CaseInsensitiveHashMap<>(); - CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); - columns.addAll(row.keySet()); - - String newName; - for(String key : row.keySet()) - { - if(_columnImportMap.containsKey(key)) - { - //it is possible for a normalized name to conflict with an existing property. if so, defer to the original - newName = _columnImportMap.get(key).getName(); - if(!columns.contains(newName)){ - newRow.put(newName, row.get(key)); - continue; - } - } - newRow.put(key, row.get(key)); - } - - return newRow; - } - - @Override - public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws DuplicateKeyException, QueryUpdateServiceException, SQLException - { - try - { - List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); - afterInsertUpdate(null==ret?0:ret.size(), errors); - if (errors.hasErrors()) - return null; - return ret; - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - } - return null; - } - - protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) - { - if (col != null && value != null && - !col.getJavaObjectClass().isInstance(value) && - !(value instanceof AttachmentFile) && - !(value instanceof MultipartFile) && - !(value instanceof String[]) && - !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) - { - try - { - if 
(col.getKindOfQuantity() != null) - providedValues.put(key, value); - if (PropertyType.FILE_LINK.equals(col.getPropertyType())) - value = ExpDataFileConverter.convert(value); - else - value = col.convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw e; - } - catch (ConversionException e) - { - // That's OK, the transformation script may be able to fix up the value before it gets inserted - } - } - - return value; - } - - /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ - @Deprecated - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); - result.put(entry.getKey(), value); - } - - return result; - } - - protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - - protected boolean firstUpdateRow = true; - Function,Map> updateTransform = Function.identity(); - - /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ - final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - if (firstUpdateRow) - { - firstUpdateRow = false; - if (null != OntologyService.get()) - { - var t = OntologyService.get().getConceptUpdateHandler(_queryTable); - if (null != t) - updateTransform = t; - } - } - row = updateTransform.apply(row); - return updateRow(user, container, row, 
oldRow, configParameters); - } - - // used by updateRows to check if all rows have the same set of keys - // prepared statement can only be used to updateRows if all rows have the same set of keys - protected static boolean hasUniformKeys(List> rowsToUpdate) - { - if (rowsToUpdate == null || rowsToUpdate.isEmpty()) - return false; - - if (rowsToUpdate.size() == 1) - return true; - - Set keys = rowsToUpdate.get(0).keySet(); - int keySize = keys.size(); - - for (int i = 1 ; i < rowsToUpdate.size(); i ++) - { - Set otherKeys = rowsToUpdate.get(i).keySet(); - if (otherKeys.size() != keySize) - return false; - if (!otherKeys.containsAll(keys)) - return false; - } - - return true; - } - - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, - BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - if (oldKeys != null && rows.size() != oldKeys.size()) - throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); - - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> oldRows = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), true); - try - { - Map oldKey = oldKeys == null ? row : oldKeys.get(i); - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, oldKey); - if (oldRow == null) - throw new NotFoundException("The existing row was not found."); - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); - Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); - if (!streaming) - { - result.add(updatedRow); - oldRows.add(oldRow); - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (OptimisticConflictException e) - { - errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); - afterInsertUpdate(null==result?0:result.size(), errors, true); - - if (errors.hasErrors()) - throw errors; - - addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); - - return result; - } - - protected void validatePartitionedRowKeys(Collection columns) - { - // do nothing - } - - public List> updateRowsUsingPartitionedDIB( - DbScope.Transaction tx, - User user, - Container container, - List> rows, - BatchValidationException errors, - @Nullable Map configParameters, - Map extraScriptContext - ) - { - int index = 0; - int numPartitions = 0; - List> ret = new ArrayList<>(); - - Set observedRowIds = new HashSet<>(); - Set observedNames = new CaseInsensitiveHashSet(); - - while (index < rows.size()) - { - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - - validatePartitionedRowKeys(rowKeys); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - numPartitions++; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); - - // skip audit summary for the partitions, we will perform it once at the end - context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); - - List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - // we need to throw if we don't want executeWithRetry() attempt commit() - if (context.getErrors().hasErrors()) - throw new DbScope.RetryPassthroughException(context.getErrors()); - - if (subRet != null) - { - ret.addAll(subRet); - - // Check if 
duplicate rows have been processed across the partitions - // Only start checking for duplicates after the first partition has been processed. - if (numPartitions > 1) - { - // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition - checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); - } - - if (errors.hasErrors()) - throw new DbScope.RetryPassthroughException(errors); - } - } - - if (numPartitions > 1) - { - var auditEvent = tx.getAuditEvent(); - if (auditEvent != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); - } - - _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); - - return ret; - } - - private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) - { - for (Map row : partitionRows) - { - Long rowId = MapUtils.getLong(row, RowId.name()); - if (rowId != null && !globalRowIds.add(rowId)) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); - return; - } - - Object nameObj = row.get(Name.name()); - if (nameObj != null && !globalNames.add(nameObj.toString())) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); - return; - } - } - } - - protected void checkDuplicateUpdate(Object pkVals) throws ValidationException - { - if (pkVals == null) - return; - - Set updatedRows = getPreviouslyUpdatedRows(); - - Object[] keysObj; - if (pkVals.getClass().isArray()) - keysObj = (Object[]) pkVals; - else if (pkVals instanceof Map map) - { - List orderedKeyVals = new ArrayList<>(); - SortedSet sortedKeys = new TreeSet<>(map.keySet()); - for (String key : sortedKeys) - orderedKeyVals.add(map.get(key)); - keysObj = orderedKeyVals.toArray(); - } - else - keysObj = new Object[]{pkVals}; - - if 
(keysObj.length == 1) - { - if (updatedRows.contains(keysObj[0])) - throw new ValidationException("Duplicate key provided: " + keysObj[0]); - updatedRows.add(keysObj[0]); - return; - } - - List keys = new ArrayList<>(); - for (Object key : keysObj) - keys.add(String.valueOf(key)); - if (updatedRows.contains(keys)) - throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); - updatedRows.add(keys); - } - - @Override - public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Move is not supported for this table type."); - } - - protected abstract Map deleteRow(User user, Container container, Map oldRow) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return deleteRow(user, container, oldRow); - } - - @Override - public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to delete data from this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); - - // TODO: Support update/delete without selecting the existing row -- 
unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - List> result = new ArrayList<>(keys.size()); - for (int i = 0; i < keys.size(); i++) - { - Map key = keys.get(i); - try - { - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, key); - // if row doesn't exist, bail early - if (oldRow == null) - continue; - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); - Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); - result.add(updatedRow); - } - catch (InvalidKeyException ex) - { - ValidationException vex = new ValidationException(ex.getMessage()); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); - - return result; - } - - protected int truncateRows(User user, Container container) - throws QueryUpdateServiceException, SQLException - { - throw new 
UnsupportedOperationException(); - } - - @Override - public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to truncate this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); - - int result = truncateRows(user, container); - - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); - addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); - - return result; - } - - @Override - public void setBulkLoad(boolean bulkLoad) - { - _bulkLoad = bulkLoad; - } - - @Override - public boolean isBulkLoad() - { - return _bulkLoad; - } - - public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException - { - FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); - return saveFile(user, container, name, value, dirPath); - } - - /** - * Save uploaded file to dirName directory under file or pipeline root. 
- */ - public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException - { - if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) - throw new ValidationException("Invalid file value"); - - String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; - FileLike file = null; - try - { - FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); - - FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); - if (value instanceof MultipartFile multipartFile) - { - // Once we've found one, write it to disk and replace the row's value with just the File reference to it - if (multipartFile.isEmpty()) - { - throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); - } - file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); - checkFileUnderRoot(container, file); - multipartFile.transferTo(toFileForWrite(file)); - event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); - event.setProvidedFileName(multipartFile.getOriginalFilename()); - } - else - { - SpringAttachmentFile saf = (SpringAttachmentFile) value; - file = FileUtil.findUniqueFileName(saf.getFilename(), dir); - checkFileUnderRoot(container, file); - saf.saveTo(file); - event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); - event.setProvidedFileName(saf.getFilename()); - } - event.setFile(file.getName()); - event.setFieldName(name); - event.setDirectory(file.getParent().toURI().getPath()); - AuditLogService.get().addEvent(user, event); - } - catch (IOException | ExperimentException e) - { - throw new QueryUpdateServiceException(e); - } - - ensureExpData(user, container, file.toNioPathForRead().toFile()); - 
return file; - } - - public static ExpData ensureExpData(User user, Container container, File file) - { - ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); - // create exp.data record - if (existingData == null) - { - File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); - ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); - data.setName(file.getName()); - data.setDataFileURI(canonicalFile.toPath().toUri()); - if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) - { - // If the path is too long to store, bail out without creating an exp.data row - data.save(user); - } - - return data; - } - - return existingData; - } - - // For security reasons, make sure the user hasn't tried to reference a file that's not under - // the pipeline root or @assayfiles root. Otherwise, they could get access to any file on the server - static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException - { - Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); - if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) - return file; - - PipeRoot root = PipelineService.get().findPipelineRoot(container); - if (root == null) - throw new ExperimentException("Pipeline root not available in container " + container.getPath()); - - if (!root.isUnderRoot(toFileForRead(file))) - { - throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); - } - - return file; - } - - protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) - { - if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level - { - AuditBehaviorType auditType = (AuditBehaviorType) 
context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); - String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); - boolean skipAuditLevelCheck = false; - if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) - { - if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad - skipAuditLevelCheck = true; - } - getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); - } - } - - /** - * Is used by the AttachmentDataIterator to point to the location of the serialized - * attachment files. - */ - public void setAttachmentDirectory(VirtualFile att) - { - _att = att; - } - - @Nullable - protected VirtualFile getAttachmentDirectory() - { - return _att; - } - - /** - * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory - * implementation in order to resolve the attachment parent on incoming attachment files. - */ - @Nullable - protected AttachmentParentFactory getAttachmentParentFactory() - { - return null; - } - - /** Translate between the column name that query is exposing to the column name that actually lives in the database */ - protected static void aliasColumns(Map columnMapping, Map row) - { - for (Map.Entry entry : columnMapping.entrySet()) - { - if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) - { - row.put(entry.getKey(), row.get(entry.getValue())); - } - } - } - - /** - * The database table has underscores for MV column names, but we expose a column without the underscore. - * Therefore, we need to translate between the two sets of column names. 
- * @return database column name -> exposed TableInfo column name - */ - protected static Map createMVMapping(Domain domain) - { - Map result = new CaseInsensitiveHashMap<>(); - if (domain != null) - { - for (DomainProperty domainProperty : domain.getProperties()) - { - if (domainProperty.isMvEnabled()) - { - result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); - } - } - } - return result; - } - - @TestWhen(TestWhen.When.BVT) - public static class TestCase extends Assert - { - private boolean _useAlias = false; - - static TabLoader getTestData() throws IOException - { - TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); - testData.parseAsCSV(); - testData.getColumns()[0].clazz = Integer.class; - testData.getColumns()[1].clazz = Integer.class; - testData.getColumns()[2].clazz = String.class; - return testData; - } - - @BeforeClass - public static void createList() throws Exception - { - if (null == ListService.get()) - return; - deleteList(); - - TabLoader testData = getTestData(); - String hash = GUID.makeHash(); - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - assertNotNull(lists); - - ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); - R.setKeyName("pk"); - Domain d = requireNonNull(R.getDomain()); - for (int i=0 ; i> getRows() - { - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); - return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); - } - - @Before - public void resetList() throws 
Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - qus.truncateRows(user, c, null, null); - } - - @AfterClass - public static void deleteList() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - Map m = s.getLists(c); - if (m.containsKey("R")) - m.get("R").delete(user); - } - - void validateDefaultData(List> rows) - { - assertEquals(3, rows.size()); - - assertEquals(0, rows.get(0).get("pk")); - assertEquals(1, rows.get(1).get("pk")); - assertEquals(2, rows.get(2).get("pk")); - - assertEquals(0, rows.get(0).get("i")); - assertEquals(1, rows.get(1).get("i")); - assertEquals(2, rows.get(2).get("i")); - - assertEquals("zero", rows.get(0).get("s")); - assertEquals("one", rows.get(1).get("s")); - assertEquals("two", rows.get(2).get("s")); - } - - @Test - public void INSERT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertFalse(errors.hasErrors()); - validateDefaultData(rows); - validateDefaultData(getRows()); - - qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void UPSERT() throws Exception - { - if (null == 
ListService.get()) - return; - /* not sure how you use/test ImportOptions.UPSERT - * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? - */ - } - - @Test - public void IMPORT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var count = qus.importRows(user, c, getTestData(), errors, null, null); - assertFalse(errors.hasErrors()); - assert(count == 3); - validateDefaultData(getRows()); - - qus.importRows(user, c, getTestData(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void MERGE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? 
"pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - BatchValidationException errors = new BatchValidationException() - { - @Override - public void addRowError(ValidationException vex) - { - LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); - fail(vex.getMessage()); - } - }; - int count=0; - try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) - { - var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - if (!errors.hasErrors()) - { - tx.commit(); - count = ret; - } - } - assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is not updated - assertEquals(2, rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - - // merge should fail if duplicate keys are provided - errors = new BatchValidationException(); - mergeRows = new ArrayList<>(); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - } - - @Test - public void UPDATE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var updateRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - - // update using data iterator - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(1, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO-UP", rows.get(2).get("s")); - // test existing row value is not updated/erased - assertEquals(2, rows.get(2).get("i")); - - // update should fail if a new record is provided - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - - // Issue 52728: update should fail if duplicate key is provide - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - - // use DIB - context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); - - // use updateRows - if (!_useAlias) // _update using alias is not supported - { - BatchValidationException errors 
= new BatchValidationException(); - try - { - qus.updateRows(user, c, updateRows, null, errors, null, null); - } - catch (Exception e) - { - - } - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - - } - } - - @Test - public void REPLACE() throws Exception - { - if (null == ListService.get()) - return; - assert(getRows().isEmpty()); - INSERT(); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.REPLACE); - var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is updated - assertNull(rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - } - - @Test - public void IMPORT_IDENTITY() - { - if (null == ListService.get()) - return; - // TODO - } - - @Test - public void ALIAS_MERGE() throws Exception - { - _useAlias = true; - MERGE(); - } - - @Test - public void ALIAS_REPLACE() throws Exception - { - _useAlias = true; - REPLACE(); - } - - @Test - public void ALIAS_UPDATE() throws Exception - { - _useAlias = true; - UPDATE(); - } - } -} +/* + * Copyright (c) 
2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.labkey.api.assay.AssayFileWriter; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.attachments.AttachmentParentFactory; +import org.labkey.api.attachments.SpringAttachmentFile; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.audit.provider.FileSystemAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DbSequenceManager; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; +import 
org.labkey.api.data.MultiValuedForeignKey; +import org.labkey.api.data.PropertyStorageSpec; +import org.labkey.api.data.RuntimeSQLException; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; +import org.labkey.api.dataiterator.ExistingRecordDataIterator; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.dataiterator.Pump; +import org.labkey.api.dataiterator.StandardDataIteratorBuilder; +import org.labkey.api.dataiterator.TriggerDataBuilderHelper; +import org.labkey.api.dataiterator.WrapperDataIterator; +import org.labkey.api.exceptions.OptimisticConflictException; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.list.ListDefinition; +import org.labkey.api.exp.list.ListService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.files.FileContentService; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.reader.TabLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.AdminPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.test.TestWhen; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JunitUtil; +import org.labkey.api.util.TestContext; +import org.labkey.api.util.URIUtil; +import org.labkey.api.view.NotFoundException; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.io.StringReader; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; +import static org.labkey.api.files.FileContentService.UPLOADED_FILE; +import static org.labkey.api.util.FileUtil.toFileForRead; +import static org.labkey.api.util.FileUtil.toFileForWrite; + +public abstract class AbstractQueryUpdateService implements QueryUpdateService +{ + protected final TableInfo _queryTable; + + 
private boolean _bulkLoad = false; + private CaseInsensitiveHashMap _columnImportMap = null; + private VirtualFile _att = null; + + /* AbstractQueryUpdateService is generally responsible for some shared functionality + * - triggers + * - coercion/validation + * - detailed logging + * - attachments + * + * If a subclass wants to disable some of these features (w/o subclassing), put flags here... + */ + protected boolean _enableExistingRecordsDataIterator = true; + protected Set _previouslyUpdatedRows = new HashSet<>(); + + protected AbstractQueryUpdateService(TableInfo queryTable) + { + if (queryTable == null) + throw new IllegalArgumentException(); + _queryTable = queryTable; + } + + protected TableInfo getQueryTable() + { + return _queryTable; + } + + public @NotNull Set getPreviouslyUpdatedRows() + { + return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) + { + return getQueryTable().hasPermission(user, acl); + } + + protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + return getRow(user, container, keys); + } + + protected abstract Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException; + + @Override + public List> getRows(User user, Container container, List> keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + List> result = new ArrayList<>(); + for (Map rowKeys : keys) + { + Map row = getRow(user, container, rowKeys); + if (row != null) + result.add(row); + } + return result; + } + + @Override + public Map> getExistingRows(User user, Container container, Map> keys, boolean 
verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + Map> result = new LinkedHashMap<>(); + for (Map.Entry> key : keys.entrySet()) + { + Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); + if (row != null && !row.isEmpty()) + { + result.put(key.getKey(), row); + if (verifyNoCrossFolderData) + { + String dataContainer = (String) row.get("container"); + if (StringUtils.isEmpty(dataContainer)) + dataContainer = (String) row.get("folder"); + if (!container.getId().equals(dataContainer)) + throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); + } + } + else if (verifyExisting) + throw new InvalidKeyException("Data not found for " + key.getValue().values()); + } + return result; + } + + @Override + public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) + { + return false; + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) + { + return createTransactionAuditEvent(container, auditAction, null); + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) + { + long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); + TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); + if (details != null) + event.addDetails(details); + return event; + } + + public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent 
auditEvent) + { + UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); + + if (schema != null) + { + // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the + // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the + // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the + // table. + schema.getTable(auditEvent.getEventType(), false); + + transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); + + transaction.setAuditEvent(auditEvent); + } + } + + protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) + { + if (null == errors) + errors = new BatchValidationException(); + DataIteratorContext context = new DataIteratorContext(errors); + context.setInsertOption(forImport); + context.setConfigParameters(configParameters); + configureDataIteratorContext(context); + recordDataIteratorUsed(configParameters); + + return context; + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters == null) + return; + + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } + catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + + /** + * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. + * Used only for generating ExistingRecordDataIterator at the moment. 
+ */ + protected Set getSelectKeys(DataIteratorContext context) + { + if (!context.getAlternateKeys().isEmpty()) + return context.getAlternateKeys(); + return null; + } + + /* + * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. + * does NOT handle triggers or the insert/update iterator. + */ + public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) + { + DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); + + if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) + { + // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) + dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); + } + + dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); + dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); + dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); + return dib; + } + + + /** + * Implementation to use insertRows() while we migrate to using DIB for all code paths + *

+ * DataIterator should/must use the same error collection as passed in + */ + @Deprecated + protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) + { + MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); + List> list = new ArrayList<>(); + List> ret; + Exception rowException; + + try + { + while (mapIterator.next()) + list.add(mapIterator.getMap()); + ret = insertRows(user, container, list, errors, null, extraScriptContext); + if (errors.hasErrors()) + return 0; + return ret.size(); + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + return 0; + } + catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) + { + rowException = x; + } + finally + { + DataIteratorUtil.closeQuietly(mapIterator); + } + errors.addRowError(new ValidationException(rowException.getMessage())); + return 0; + } + + protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) + { + return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); + } + + protected boolean hasInsertRowsPermission(User user) + { + return hasPermission(user, InsertPermission.class); + } + + protected boolean hasDeleteRowsPermission(User user) + { + return hasPermission(user, DeletePermission.class); + } + + protected boolean hasUpdateRowsPermission(User user) + { + return hasPermission(user, UpdatePermission.class); + } + + // override this + protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) + { + } + + protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasImportRowsPermission(user, container, context)) + throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) + assert(getQueryTable().supportsInsertOption(context.getInsertOption())); + + context.getErrors().setExtraContext(extraScriptContext); + if (extraScriptContext != null) + { + context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); + } + + preImportDIBValidation(in, null); + + boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); + boolean hasTableScript = hasTableScript(container); + TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); + if (!skipTriggers) + { + in = preTriggerDataIterator(in, context); + if (hasTableScript) + in = helper.before(in); + } + DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); + DataIteratorBuilder out = importDIB; + + if 
(!skipTriggers) + { + if (hasTableScript) + out = helper.after(importDIB); + + out = postTriggerDataIterator(out, context); + } + + if (hasTableScript) + { + context.setFailFast(false); + context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); + } + int count = _pump(out, outputRows, context); + + if (context.getErrors().hasErrors()) + return 0; + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) + _addSummaryAuditEvent(container, user, context, count); + + return count; + } + + protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) + { + return in; + } + + protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) + { + return out; + } + + /** this is extracted so subclasses can add wrap */ + protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) + { + DataIterator it = etl.getDataIterator(context); + + if (null == it) + return 0; + + try + { + if (null != rows) + { + MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); + it = new WrapperDataIterator(maps) + { + @Override + public boolean next() throws BatchValidationException + { + boolean ret = super.next(); + if (ret) + rows.add(((MapDataIterator)_delegate).getMap()); + return ret; + } + }; + } + + Pump pump = new Pump(it, context); + pump.run(); + + return pump.getRowCount(); + } + finally + { + DataIteratorUtil.closeQuietly(it); + } + } + + /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ + protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) + { + afterInsertUpdate(count, errors); + } + + protected void afterInsertUpdate(int count, BatchValidationException errors) + {} + + @Override + public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + 
return loadRows(user, container, rows, null, context, extraScriptContext); + } + + public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + configureDataIteratorContext(context); + int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + return count; + } + + @Override + public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) + { + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); + int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); + afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); + return count; + } + + @Override + public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + { + throw new UnsupportedOperationException("merge is not supported for all tables"); + } + + private boolean hasTableScript(Container container) + { + return getQueryTable().hasTriggers(container); + } + + + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); + } + + + protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); + ArrayList> outputRows = new ArrayList<>(); + int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + + if (context.getErrors().hasErrors()) + return null; + + return outputRows; + } + + // not yet supported + protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + + protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) + { + // TODO probably can't assume all rows have all columns + // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) + // TODO optimize ArrayListMap? 
+ Set colNames; + + if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) + { + colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); + } + else + { + // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet + colNames = Sets.newCaseInsensitiveHashSet(); + for (Map row : rows) + colNames.addAll(row.keySet()); + } + + preImportDIBValidation(null, colNames); + return MapDataIterator.of(colNames, rows, debugName); + } + + + /** @deprecated switch to using DIB based method */ + @Deprecated + protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) + throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + boolean hasTableScript = hasTableScript(container); + + errors.setExtraContext(extraScriptContext); + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + row = normalizeColumnNames(row); + try + { + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), false); + if (hasTableScript) + { + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); + } + row = insertRow(user, container, row); + if (row == null) + continue; + + if (hasTableScript) + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); + result.add(row); + } + catch (SQLException sqlx) + { + if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) + { + ValidationException vex = new ValidationException(sqlx.getMessage()); + vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); + errors.addRowError(vex); + } + else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) + { + // if we already have some errors, just break + break; + } + else + { + throw sqlx; + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + } + + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); + + return result; + } + + protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) + { + if (!isBulkLoad()) + { + AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; + String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); + getQueryTable().getAuditHandler(auditBehavior) + .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); + } + } + + private Map normalizeColumnNames(Map row) + { + if(_columnImportMap == null) + { + _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); + } + + Map newRow = new CaseInsensitiveHashMap<>(); + CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); + columns.addAll(row.keySet()); + + String newName; + for(String key : row.keySet()) + { + if(_columnImportMap.containsKey(key)) + { + //it is possible for a normalized name to conflict with an existing property. if so, defer to the original + newName = _columnImportMap.get(key).getName(); + if(!columns.contains(newName)){ + newRow.put(newName, row.get(key)); + continue; + } + } + newRow.put(key, row.get(key)); + } + + return newRow; + } + + @Override + public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws DuplicateKeyException, QueryUpdateServiceException, SQLException + { + try + { + List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); + afterInsertUpdate(null==ret?0:ret.size(), errors); + if (errors.hasErrors()) + return null; + return ret; + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + } + return null; + } + + protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) + { + if (col != null && value != null && + !col.getJavaObjectClass().isInstance(value) && + !(value instanceof AttachmentFile) && + !(value instanceof MultipartFile) && + !(value instanceof String[]) && + !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) + { + try + { + if 
(col.getKindOfQuantity() != null) + providedValues.put(key, value); + if (PropertyType.FILE_LINK.equals(col.getPropertyType())) + value = ExpDataFileConverter.convert(value); + else + value = col.convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw e; + } + catch (ConversionException e) + { + // That's OK, the transformation script may be able to fix up the value before it gets inserted + } + } + + return value; + } + + /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ + @Deprecated + protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) + { + Map result = new CaseInsensitiveHashMap<>(row.size()); + Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); + for (Map.Entry entry : row.entrySet()) + { + ColumnInfo col = columnMap.get(entry.getKey()); + Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); + result.put(entry.getKey(), value); + } + + return result; + } + + protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + + protected boolean firstUpdateRow = true; + Function,Map> updateTransform = Function.identity(); + + /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ + final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + if (firstUpdateRow) + { + firstUpdateRow = false; + if (null != OntologyService.get()) + { + var t = OntologyService.get().getConceptUpdateHandler(_queryTable); + if (null != t) + updateTransform = t; + } + } + row = updateTransform.apply(row); + return updateRow(user, container, row, 
oldRow, configParameters); + } + + // used by updateRows to check if all rows have the same set of keys + // prepared statement can only be used to updateRows if all rows have the same set of keys + protected static boolean hasUniformKeys(List> rowsToUpdate) + { + if (rowsToUpdate == null || rowsToUpdate.isEmpty()) + return false; + + if (rowsToUpdate.size() == 1) + return true; + + Set keys = rowsToUpdate.get(0).keySet(); + int keySize = keys.size(); + + for (int i = 1 ; i < rowsToUpdate.size(); i ++) + { + Set otherKeys = rowsToUpdate.get(i).keySet(); + if (otherKeys.size() != keySize) + return false; + if (!otherKeys.containsAll(keys)) + return false; + } + + return true; + } + + @Override + public List> updateRows(User user, Container container, List> rows, List> oldKeys, + BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + if (oldKeys != null && rows.size() != oldKeys.size()) + throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); + + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> oldRows = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), true); + try + { + Map oldKey = oldKeys == null ? row : oldKeys.get(i); + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, oldKey); + if (oldRow == null) + throw new NotFoundException("The existing row was not found."); + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); + Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); + if (!streaming) + { + result.add(updatedRow); + oldRows.add(oldRow); + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (OptimisticConflictException e) + { + errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + afterInsertUpdate(null==result?0:result.size(), errors, true); + + if (errors.hasErrors()) + throw errors; + + addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); + + return result; + } + + protected void validatePartitionedRowKeys(Collection columns) + { + // do nothing + } + + public List> updateRowsUsingPartitionedDIB( + DbScope.Transaction tx, + User user, + Container container, + List> rows, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) + { + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) + { + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + + validatePartitionedRowKeys(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + + // we need to throw if we don't want executeWithRetry() attempt commit() + if (context.getErrors().hasErrors()) + throw new DbScope.RetryPassthroughException(context.getErrors()); + + if (subRet != null) + { + ret.addAll(subRet); + + // Check if 
duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) + { + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); + } + + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); + + return ret; + } + + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException + { + if (pkVals == null) + return; + + Set updatedRows = getPreviouslyUpdatedRows(); + + Object[] keysObj; + if (pkVals.getClass().isArray()) + keysObj = (Object[]) pkVals; + else if (pkVals instanceof Map map) + { + List orderedKeyVals = new ArrayList<>(); + SortedSet sortedKeys = new TreeSet<>(map.keySet()); + for (String key : sortedKeys) + orderedKeyVals.add(map.get(key)); + keysObj = orderedKeyVals.toArray(); + } + else + keysObj = new Object[]{pkVals}; + + if 
(keysObj.length == 1) + { + if (updatedRows.contains(keysObj[0])) + throw new ValidationException("Duplicate key provided: " + keysObj[0]); + updatedRows.add(keysObj[0]); + return; + } + + List keys = new ArrayList<>(); + for (Object key : keysObj) + keys.add(String.valueOf(key)); + if (updatedRows.contains(keys)) + throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); + updatedRows.add(keys); + } + + @Override + public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Move is not supported for this table type."); + } + + protected abstract Map deleteRow(User user, Container container, Map oldRow) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return deleteRow(user, container, oldRow); + } + + @Override + public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to delete data from this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + + // TODO: Support update/delete without selecting the existing row -- 
unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + List> result = new ArrayList<>(keys.size()); + for (int i = 0; i < keys.size(); i++) + { + Map key = keys.get(i); + try + { + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, key); + // if row doesn't exist, bail early + if (oldRow == null) + continue; + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); + Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); + result.add(updatedRow); + } + catch (InvalidKeyException ex) + { + ValidationException vex = new ValidationException(ex.getMessage()); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); + + return result; + } + + protected int truncateRows(User user, Container container) + throws QueryUpdateServiceException, SQLException + { + throw new 
UnsupportedOperationException(); + } + + @Override + public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to truncate this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); + + int result = truncateRows(user, container); + + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); + addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); + + return result; + } + + @Override + public void setBulkLoad(boolean bulkLoad) + { + _bulkLoad = bulkLoad; + } + + @Override + public boolean isBulkLoad() + { + return _bulkLoad; + } + + public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException + { + FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); + return saveFile(user, container, name, value, dirPath); + } + + /** + * Save uploaded file to dirName directory under file or pipeline root. 
+ */ + public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException + { + if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) + throw new ValidationException("Invalid file value"); + + String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; + FileLike file = null; + try + { + FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); + + FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); + if (value instanceof MultipartFile multipartFile) + { + // Once we've found one, write it to disk and replace the row's value with just the File reference to it + if (multipartFile.isEmpty()) + { + throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); + } + file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); + checkFileUnderRoot(container, file); + multipartFile.transferTo(toFileForWrite(file)); + event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); + event.setProvidedFileName(multipartFile.getOriginalFilename()); + } + else + { + SpringAttachmentFile saf = (SpringAttachmentFile) value; + file = FileUtil.findUniqueFileName(saf.getFilename(), dir); + checkFileUnderRoot(container, file); + saf.saveTo(file); + event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); + event.setProvidedFileName(saf.getFilename()); + } + event.setFile(file.getName()); + event.setFieldName(name); + event.setDirectory(file.getParent().toURI().getPath()); + AuditLogService.get().addEvent(user, event); + } + catch (IOException | ExperimentException e) + { + throw new QueryUpdateServiceException(e); + } + + ensureExpData(user, container, file.toNioPathForRead().toFile()); + 
return file; + } + + public static ExpData ensureExpData(User user, Container container, File file) + { + ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); + // create exp.data record + if (existingData == null) + { + File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); + ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); + data.setName(file.getName()); + data.setDataFileURI(canonicalFile.toPath().toUri()); + if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) + { + // If the path is too long to store, bail out without creating an exp.data row + data.save(user); + } + + return data; + } + + return existingData; + } + + // For security reasons, make sure the user hasn't tried to reference a file that's not under + // the pipeline root or @assayfiles root. Otherwise, they could get access to any file on the server + static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException + { + Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); + if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) + return file; + + PipeRoot root = PipelineService.get().findPipelineRoot(container); + if (root == null) + throw new ExperimentException("Pipeline root not available in container " + container.getPath()); + + if (!root.isUnderRoot(toFileForRead(file))) + { + throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); + } + + return file; + } + + protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) + { + if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level + { + AuditBehaviorType auditType = (AuditBehaviorType) 
context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); + String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); + boolean skipAuditLevelCheck = false; + if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) + { + if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad + skipAuditLevelCheck = true; + } + getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); + } + } + + /** + * Is used by the AttachmentDataIterator to point to the location of the serialized + * attachment files. + */ + public void setAttachmentDirectory(VirtualFile att) + { + _att = att; + } + + @Nullable + protected VirtualFile getAttachmentDirectory() + { + return _att; + } + + /** + * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory + * implementation in order to resolve the attachment parent on incoming attachment files. + */ + @Nullable + protected AttachmentParentFactory getAttachmentParentFactory() + { + return null; + } + + /** Translate between the column name that query is exposing to the column name that actually lives in the database */ + protected static void aliasColumns(Map columnMapping, Map row) + { + for (Map.Entry entry : columnMapping.entrySet()) + { + if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) + { + row.put(entry.getKey(), row.get(entry.getValue())); + } + } + } + + /** + * The database table has underscores for MV column names, but we expose a column without the underscore. + * Therefore, we need to translate between the two sets of column names. 
+ * @return database column name -> exposed TableInfo column name + */ + protected static Map createMVMapping(Domain domain) + { + Map result = new CaseInsensitiveHashMap<>(); + if (domain != null) + { + for (DomainProperty domainProperty : domain.getProperties()) + { + if (domainProperty.isMvEnabled()) + { + result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); + } + } + } + return result; + } + + @TestWhen(TestWhen.When.BVT) + public static class TestCase extends Assert + { + private boolean _useAlias = false; + + static TabLoader getTestData() throws IOException + { + TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); + testData.parseAsCSV(); + testData.getColumns()[0].clazz = Integer.class; + testData.getColumns()[1].clazz = Integer.class; + testData.getColumns()[2].clazz = String.class; + return testData; + } + + @BeforeClass + public static void createList() throws Exception + { + if (null == ListService.get()) + return; + deleteList(); + + TabLoader testData = getTestData(); + String hash = GUID.makeHash(); + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + assertNotNull(lists); + + ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); + R.setKeyName("pk"); + Domain d = requireNonNull(R.getDomain()); + for (int i=0 ; i> getRows() + { + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); + return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); + } + + @Before + public void resetList() throws 
Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + qus.truncateRows(user, c, null, null); + } + + @AfterClass + public static void deleteList() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + Map m = s.getLists(c); + if (m.containsKey("R")) + m.get("R").delete(user); + } + + void validateDefaultData(List> rows) + { + assertEquals(3, rows.size()); + + assertEquals(0, rows.get(0).get("pk")); + assertEquals(1, rows.get(1).get("pk")); + assertEquals(2, rows.get(2).get("pk")); + + assertEquals(0, rows.get(0).get("i")); + assertEquals(1, rows.get(1).get("i")); + assertEquals(2, rows.get(2).get("i")); + + assertEquals("zero", rows.get(0).get("s")); + assertEquals("one", rows.get(1).get("s")); + assertEquals("two", rows.get(2).get("s")); + } + + @Test + public void INSERT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertFalse(errors.hasErrors()); + validateDefaultData(rows); + validateDefaultData(getRows()); + + qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void UPSERT() throws Exception + { + if (null == 
ListService.get()) + return; + /* not sure how you use/test ImportOptions.UPSERT + * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? + */ + } + + @Test + public void IMPORT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var count = qus.importRows(user, c, getTestData(), errors, null, null); + assertFalse(errors.hasErrors()); + assert(count == 3); + validateDefaultData(getRows()); + + qus.importRows(user, c, getTestData(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void MERGE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? 
"pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + BatchValidationException errors = new BatchValidationException() + { + @Override + public void addRowError(ValidationException vex) + { + LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); + fail(vex.getMessage()); + } + }; + int count=0; + try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) + { + var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + if (!errors.hasErrors()) + { + tx.commit(); + count = ret; + } + } + assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is not updated + assertEquals(2, rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + + // merge should fail if duplicate keys are provided + errors = new BatchValidationException(); + mergeRows = new ArrayList<>(); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + } + + @Test + public void UPDATE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var updateRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + + // update using data iterator + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(1, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO-UP", rows.get(2).get("s")); + // test existing row value is not updated/erased + assertEquals(2, rows.get(2).get("i")); + + // update should fail if a new record is provided + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + + // Issue 52728: update should fail if duplicate key is provide + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + + // use DIB + context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); + + // use updateRows + if (!_useAlias) // _update using alias is not supported + { + BatchValidationException errors 
= new BatchValidationException(); + try + { + qus.updateRows(user, c, updateRows, null, errors, null, null); + } + catch (Exception e) + { + + } + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + + } + } + + @Test + public void REPLACE() throws Exception + { + if (null == ListService.get()) + return; + assert(getRows().isEmpty()); + INSERT(); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.REPLACE); + var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is updated + assertNull(rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + } + + @Test + public void IMPORT_IDENTITY() + { + if (null == ListService.get()) + return; + // TODO + } + + @Test + public void ALIAS_MERGE() throws Exception + { + _useAlias = true; + MERGE(); + } + + @Test + public void ALIAS_REPLACE() throws Exception + { + _useAlias = true; + REPLACE(); + } + + @Test + public void ALIAS_UPDATE() throws Exception + { + _useAlias = true; + UPDATE(); + } + } +} diff --git 
a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index f62a69ca510..d04ea279c65 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,936 +1,936 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import 
org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext, providedValue); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value, null); - } - } - } - - protected void validateUpdateRow(Map row) throws ValidationException - { - for 
(ColumnInfo col : getQueryTable().getColumns()) - { - // Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value, null); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have 
not existed in the row before the update. - lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = pk.convert(pkValue); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getDbTable(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import 
org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = pk.convert(pkValue); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } +} diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 9eb5e1808e3..4f1dae671a6 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -1,1173 +1,1173 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.admin.FolderSerializationRegistry; -import org.labkey.api.assay.AssayProvider; -import org.labkey.api.assay.AssayService; -import org.labkey.api.attachments.AttachmentService; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.SampleTimelineAuditEvent; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbSchemaType; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.NameGenerator; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SimpleFilter.FilterClause; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpgradeCode; -import org.labkey.api.defaults.DefaultValueService; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.ExperimentRunType; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.DefaultExperimentDataHandler; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpLineageService; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolAttachmentType; -import org.labkey.api.exp.api.ExpRunAttachmentType; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentJSONConverter; -import org.labkey.api.exp.api.ExperimentService; 
-import org.labkey.api.exp.api.FilterProtocolInputCriteria; -import org.labkey.api.exp.api.SampleTypeDomainKind; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DomainAuditProvider; -import org.labkey.api.exp.property.DomainPropertyAuditProvider; -import org.labkey.api.exp.property.ExperimentProperty; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.SystemProperty; -import org.labkey.api.exp.query.ExpDataClassTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.exp.xar.LSIDRelativizer; -import org.labkey.api.exp.xar.LsidUtils; -import org.labkey.api.files.FileContentService; -import org.labkey.api.files.TableUpdaterFileListener; -import org.labkey.api.migration.DatabaseMigrationService; -import org.labkey.api.migration.ExperimentDeleteService; -import org.labkey.api.migration.MigrationTableHandler; -import org.labkey.api.module.ModuleContext; -import org.labkey.api.module.ModuleLoader; -import org.labkey.api.module.SpringModule; -import org.labkey.api.module.Summary; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.FilteredTable; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.UserSchema; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.roles.RoleManager; -import org.labkey.api.settings.AppProps; -import org.labkey.api.settings.OptionalFeatureService; -import org.labkey.api.usageMetrics.UsageMetricsService; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JspTestCase; -import org.labkey.api.util.PageFlowUtil; -import 
org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.SystemMaintenance; -import org.labkey.api.view.AlwaysAvailableWebPartFactory; -import org.labkey.api.view.BaseWebPartFactory; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.JspView; -import org.labkey.api.view.Portal; -import org.labkey.api.view.ViewContext; -import org.labkey.api.view.WebPartFactory; -import org.labkey.api.view.WebPartView; -import org.labkey.api.view.template.WarningService; -import org.labkey.api.vocabulary.security.DesignVocabularyPermission; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.api.webdav.WebdavService; -import org.labkey.api.writer.ContainerUser; -import org.labkey.experiment.api.DataClassDomainKind; -import org.labkey.experiment.api.ExpDataClassImpl; -import org.labkey.experiment.api.ExpDataClassTableImpl; -import org.labkey.experiment.api.ExpDataClassType; -import org.labkey.experiment.api.ExpDataImpl; -import org.labkey.experiment.api.ExpDataTableImpl; -import org.labkey.experiment.api.ExpMaterialImpl; -import org.labkey.experiment.api.ExpProtocolImpl; -import org.labkey.experiment.api.ExpSampleTypeImpl; -import org.labkey.experiment.api.ExpSampleTypeTableImpl; -import org.labkey.experiment.api.ExperimentServiceImpl; -import org.labkey.experiment.api.ExperimentStressTest; -import org.labkey.experiment.api.GraphAlgorithms; -import org.labkey.experiment.api.LineageTest; -import org.labkey.experiment.api.LogDataType; -import org.labkey.experiment.api.Protocol; -import org.labkey.experiment.api.SampleTypeServiceImpl; -import org.labkey.experiment.api.SampleTypeUpdateServiceDI; -import org.labkey.experiment.api.UniqueValueCounterTestCase; -import org.labkey.experiment.api.VocabularyDomainKind; -import org.labkey.experiment.api.data.ChildOfCompareType; -import org.labkey.experiment.api.data.ChildOfMethod; -import org.labkey.experiment.api.data.LineageCompareType; -import org.labkey.experiment.api.data.ParentOfCompareType; -import 
org.labkey.experiment.api.data.ParentOfMethod; -import org.labkey.experiment.api.property.DomainImpl; -import org.labkey.experiment.api.property.DomainPropertyImpl; -import org.labkey.experiment.api.property.LengthValidator; -import org.labkey.experiment.api.property.LookupValidator; -import org.labkey.experiment.api.property.PropertyServiceImpl; -import org.labkey.experiment.api.property.RangeValidator; -import org.labkey.experiment.api.property.RegExValidator; -import org.labkey.experiment.api.property.StorageNameGenerator; -import org.labkey.experiment.api.property.StorageProvisionerImpl; -import org.labkey.experiment.api.property.TextChoiceValidator; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.controllers.property.PropertyController; -import org.labkey.experiment.defaults.DefaultValueServiceImpl; -import org.labkey.experiment.lineage.ExpLineageServiceImpl; -import org.labkey.experiment.lineage.LineagePerfTest; -import org.labkey.experiment.pipeline.ExperimentPipelineProvider; -import org.labkey.experiment.pipeline.XarTestPipelineJob; -import org.labkey.experiment.samples.DataClassFolderImporter; -import org.labkey.experiment.samples.DataClassFolderWriter; -import org.labkey.experiment.samples.SampleStatusFolderImporter; -import org.labkey.experiment.samples.SampleTimelineAuditProvider; -import org.labkey.experiment.samples.SampleTypeFolderImporter; -import org.labkey.experiment.samples.SampleTypeFolderWriter; -import org.labkey.experiment.security.DataClassDesignerRole; -import org.labkey.experiment.security.SampleTypeDesignerRole; -import org.labkey.experiment.types.TypesController; -import org.labkey.experiment.xar.FolderXarImporterFactory; -import org.labkey.experiment.xar.FolderXarWriterFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; 
-import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; -import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; -import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; - -public class ExperimentModule extends SpringModule -{ - private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; - private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; - - public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; - public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; - public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; - public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; - - @Override - public String getName() - { - return MODULE_NAME; - } - - @Override - public Double getSchemaVersion() - { - return 26.005; - } - - @Nullable - @Override - public UpgradeCode getUpgradeCode() - { - return new ExperimentUpgradeCode(); - } - - @Override - protected void init() - { - addController("experiment", ExperimentController.class); - addController("experiment-types", TypesController.class); - addController("property", PropertyController.class); - ExperimentService.setInstance(new ExperimentServiceImpl()); - SampleTypeService.setInstance(new SampleTypeServiceImpl()); - DefaultValueService.setInstance(new DefaultValueServiceImpl()); - StorageProvisioner.setInstance(StorageProvisionerImpl.get()); - ExpLineageService.setInstance(new ExpLineageServiceImpl()); - - PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); - PropertyService.setInstance(propertyServiceImpl); - UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); - - 
UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); - - ExperimentProperty.register(); - SamplesSchema.register(this); - ExpSchema.register(this); - - PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); - PropertyService.get().registerDomainKind(new DataClassDomainKind()); - PropertyService.get().registerDomainKind(new VocabularyDomainKind()); - - QueryService.get().addCompareType(new ChildOfCompareType()); - QueryService.get().addCompareType(new ParentOfCompareType()); - QueryService.get().addCompareType(new LineageCompareType()); - QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); - QueryService.get().addQueryListener(new PropertyQueryChangeListener()); - - PropertyService.get().registerValidatorKind(new RegExValidator()); - PropertyService.get().registerValidatorKind(new RangeValidator()); - PropertyService.get().registerValidatorKind(new LookupValidator()); - PropertyService.get().registerValidatorKind(new LengthValidator()); - PropertyService.get().registerValidatorKind(new TextChoiceValidator()); - - ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); - ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); - ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); - ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); - ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment 
tables", - "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); - if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) - { - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", - "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); - } - else - { - OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", - "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); - - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", - "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, "Allow multi-value Text Choice properties", - "Support selecting more than one value for text choice fields", false); - } - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", - "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", - "Support for querying lineage of experiment objects", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", - "If the incoming data includes a RowId column we 
will allow the column but ignore it's values.", false); - - RoleManager.registerPermission(new DesignVocabularyPermission(), true); - RoleManager.registerRole(new SampleTypeDesignerRole()); - RoleManager.registerRole(new DataClassDesignerRole()); - - AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); - AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); - - WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); - ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); - - addModuleProperty(new LineageMaximumDepthModuleProperty(this)); - WarningService.get().register(new ExperimentWarningProvider()); - } - - @Override - public boolean hasScripts() - { - return true; - } - - @Override - @NotNull - protected Collection createWebPartFactories() - { - List result = new ArrayList<>(); - - BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); - } - }; - runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); - result.add(runGroupsFactory); - - BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunTypeWebPart(); - } - }; - result.add(runTypesFactory); - - result.add(new ExperimentRunWebPartFactory()); - BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, 
WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); - result.add(sampleTypeFactory); - result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); - view.setTitle("Samples"); - return view; - } - }); - - result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); - } - }); - - BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - narrowProtocolFactory.addLegacyNames("Narrow Protocols"); - result.add(narrowProtocolFactory); - - return result; - } - - private void addDataResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() - { - @Override - public WebdavResource resolve(@NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == 
null) - return null; - - return data.createIndexDocument(null); - } - - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); - if (idDataMap == null) - return null; - - Map> searchJsonMap = new HashMap<>(); - for (String resourceIdentifier : idDataMap.keySet()) - searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); - return searchJsonMap; - } - }); - } - - private void addDataClassResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); - if (dataClass == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "dataClass" + (dataClass.getCategory() != null ? 
":" + dataClass.getCategory() : "")); - - return properties; - } - }); - } - - private void addSampleTypeResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); - if (sampleType == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "sampleSet"); - - return properties; - } - }); - } - - private void addSampleResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); - if (material == null) - return null; - - return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Set rowIds = new HashSet<>(); - Map rowIdIdentifierMap = new LongHashMap<>(); - for (String resourceIdentifier : resourceIdentifiers) - { - long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId != 0) - { - rowIds.add(rowId); - rowIdIdentifierMap.put(rowId, resourceIdentifier); - } - } - - Map> searchJsonMap = new HashMap<>(); - for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) - { - 
searchJsonMap.put( - rowIdIdentifierMap.get(material.getRowId()), - ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() - ); - } - - return searchJsonMap; - } - }); - } - - @Override - protected void startupAfterSpringConfig(ModuleContext moduleContext) - { - SearchService ss = SearchService.get(); -// ss.addSearchCategory(OntologyManager.conceptCategory); - ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); - ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); - ss.addSearchCategory(ExpMaterialImpl.searchCategory); - ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); - ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataImpl.expDataCategory); - ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); - ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); - addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); - addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); - addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); - addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); - ss.addDocumentProvider(ExperimentServiceImpl.get()); - - PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); - ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); - ExperimentService.get().registerDataType(new LogDataType()); - - AuditLogService.get().registerAuditType(new DomainAuditProvider()); - 
AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); - AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); - - FileContentService fileContentService = FileContentService.get(); - if (null != fileContentService) - { - fileContentService.addFileListener(new ExpDataFileListener()); - fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); - fileContentService.addFileListener(new FileLinkFileListener()); - } - ContainerManager.addContainerListener(new ContainerManager.ContainerListener() - { - @Override - public void containerDeleted(Container c, User user) - { - try - { - ExperimentService.get().deleteAllExpObjInContainer(c, user); - } - catch (ExperimentException ee) - { - throw new RuntimeException(ee); - } - } - }, - // This is in the Last group because when a container is deleted, - // the Experiment listener needs to be called after the Study listener, - // because Study needs the metadata held by Experiment to delete properly. 
- // but it should be before the CoreContainerListener - ContainerManager.ContainerListener.Order.Last); - - if (ModuleLoader.getInstance().shouldInsertData()) - SystemProperty.registerProperties(); - - FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); - if (null != folderRegistry) - { - folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); - folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); - folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); - folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); - } - - AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); - - WebdavService.get().addProvider(new ScriptsResourceProvider()); - - SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); - - UsageMetricsService svc = UsageMetricsService.get(); - if (null != svc) - { - svc.registerUsageMetrics(getName(), () -> { - Map results = new HashMap<>(); - - DbSchema schema = ExperimentService.get().getSchema(); - if (AssayService.get() != null) - { - Map assayMetrics = new HashMap<>(); - SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); - SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); - for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) - { - Map protocolMetrics = new HashMap<>(); - - // Run count across all assay designs of this type - SQLFragment runSQL = new SQLFragment(baseRunSQL); - runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); - protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); - - // Number of assay designs of this type - SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); - protocolSQL.add(assayProvider.getProtocolPattern()); - protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); - List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); - protocolMetrics.put("protocolCount", protocols.size()); - - List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); - - protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); - - // Primary implementation class - protocolMetrics.put("implementingClass", assayProvider.getClass()); - - assayMetrics.put(assayProvider.getName(), protocolMetrics); - } - assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - - assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); - SQLFragment runsWithPlateSQL = new SQLFragment(""" - SELECT COUNT(*) FROM exp.experimentrun r - INNER JOIN exp.object o ON o.objectUri = r.lsid - INNER JOIN exp.objectproperty op ON op.objectId = o.objectId - WHERE op.propertyid IN ( - SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? - )"""); - assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); - assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); - - assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file - assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ - SELECT COUNT(*) FROM ( - SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data - WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( - SELECT rowid FROM exp.protocolapplication - WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') - ) - GROUP BY sourceapplicationid - ) x WHERE count > 1""").getObject(Long.class)); - - Map sampleLookupCountMetrics = new HashMap<>(); - SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); - - SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); - sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); - sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( - """ - SELECT COUNT(*) FROM ( - SELECT PD.domainid, COUNT(*) AS PropCount - FROM exp.propertydescriptor D - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) - AND propertyuri LIKE ? - GROUP BY PD.domainid - ) X WHERE X.PropCount > 1""" - ); - resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); - - assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); - - - // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) - results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assay", assayMetrics); - } - - results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); - results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); - - if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries - { - Collection> numSampleCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); - results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( - map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") - ).count()); - } - UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); - FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); - - SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + - " FROM (\n" + - " -- updates that are marked as lineage updates\n" + - " (SELECT DISTINCT transactionId\n" + - " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + - " AND comment = 'Sample was updated.'\n" + - " ) a1\n" + - " JOIN\n" + - " -- but have associated entries that are not lineage updates\n" + - " (SELECT DISTINCT transactionid\n" + - " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + - " ON a1.transactionid = a2.transactionid\n" + - " )"); - - results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); - - results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); - results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); - results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); - results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); - results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); - results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); - results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); - results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); - results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); - - results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - String duplicateCaseInsensitiveSampleNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.material - WHERE materialsourceid IS NOT NULL - GROUP BY LOWER(name), materialsourceid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - String duplicateCaseInsensitiveDataNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.data - WHERE classid IS NOT NULL - GROUP BY LOWER(name), classid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); - results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); - - results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); - results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); - results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + - "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); - if (schema.getSqlDialect().isPostgreSQL()) - { - Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); - results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> - (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); - } - - results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); - results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); - results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); - - results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); - results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - - results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); - - results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); - results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); - - results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); - results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); - - results.putAll(ExperimentService.get().getDomainMetrics()); - - return results; - }); - } - } - - @Override - public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) - { - ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); - service.registerSchemaHandler(handler); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); - DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); - service.registerSchemaHandler(dcHandler); - ExperimentDeleteService.setInstance(dcHandler); - } - - @Override - @NotNull - public Collection getSummary(Container c) - { - Collection list = new LinkedList<>(); - int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); - if (runGroupCount > 0) - list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); - - User user = HttpView.currentContext().getUser(); - - Set runTypes = ExperimentService.get().getExperimentRunTypes(c); - for (ExperimentRunType runType : runTypes) - { - if (runType == ExperimentRunType.ALL_RUNS_TYPE) - continue; - - long runCount = runType.getRunCount(user, c); - if (runCount > 0) - list.add(runCount + " runs of type " + runType.getDescription()); - } - - int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); - if (dataClassCount > 0) - list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); - - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if (sampleTypeCount > 0) - list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); - - return list; - } - - @Override - public @NotNull ArrayList getDetailedSummary(Container c, User user) - { - ArrayList summaries = new ArrayList<>(); - - // Assay types - long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); - if (assayTypeCount > 0) - summaries.add(new Summary(assayTypeCount, "Assay Type")); - - // Run count - int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); - if (runGroupCount > 0) - summaries.add(new Summary(runGroupCount, "Assay run")); - - // Number of Data Classes - List dataClasses = ExperimentService.get().getDataClasses(c, false); - int dataClassCount = dataClasses.size(); - if (dataClassCount > 0) - summaries.add(new Summary(dataClassCount, "Data Class")); - - ExpSchema expSchema = new ExpSchema(user, c); - - // Individual Data Class row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 47919: The "DataCount" column is filtered to only count data in the target container - if (table instanceof ExpDataClassTableImpl tableImpl) - tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpDataClassTable.Column.Name.name()); - columns.add(ExpDataClassTable.Column.DataCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - summaries.add(new Summary(count, entry.getKey())); - } - } - - // Sample Types - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if 
(sampleTypeCount > 0) - summaries.add(new Summary(sampleTypeCount, "Sample Type")); - - // Individual Sample Type row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 51557: The "SampleCount" column is filtered to only count data in the target container - if (table instanceof ExpSampleTypeTableImpl tableImpl) - tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpSampleTypeTable.Column.Name.name()); - columns.add(ExpSampleTypeTable.Column.SampleCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - { - String name = entry.getKey(); - Summary s = name.equals("MixtureBatches") - ? 
new Summary(count, "Batch") - : new Summary(count, name); - summaries.add(s); - } - } - } - - return summaries; - } - - @Override - public @NotNull Set> getIntegrationTests() - { - return Set.of( - DomainImpl.TestCase.class, - DomainPropertyImpl.TestCase.class, - ExpDataTableImpl.TestCase.class, - ExperimentServiceImpl.AuditDomainUriTest.class, - ExperimentServiceImpl.LineageQueryTestCase.class, - ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, - ExperimentServiceImpl.TestCase.class, - ExperimentStressTest.class, - LineagePerfTest.class, - LineageTest.class, - OntologyManager.TestCase.class, - PropertyServiceImpl.TestCase.class, - SampleTypeServiceImpl.TestCase.class, - StorageNameGenerator.TestCase.class, - StorageProvisionerImpl.TestCase.class, - UniqueValueCounterTestCase.class, - XarTestPipelineJob.TestCase.class - ); - } - - @Override - public @NotNull Collection>> getIntegrationTestFactories() - { - List>> list = new ArrayList<>(super.getIntegrationTestFactories()); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); - return list; - } - - @Override - public @NotNull Set> getUnitTests() - { - return Set.of( - GraphAlgorithms.TestCase.class, - LSIDRelativizer.TestCase.class, - Lsid.TestCase.class, - LsidUtils.TestCase.class, - PropertyController.TestCase.class, - Quantity.TestCase.class, - Unit.TestCase.class - ); - } - - @Override - @NotNull - public Collection getSchemaNames() - { - return List.of( - ExpSchema.SCHEMA_NAME, - DataClassDomainKind.PROVISIONED_SCHEMA_NAME, - SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME - ); - } - - @NotNull - @Override - public Collection getProvisionedSchemaNames() - { - return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); - } - - @Override - public JSONObject getPageContextJson(ContainerUser context) - { - JSONObject json = 
super.getPageContextJson(context); - json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); - return json; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.admin.FolderSerializationRegistry; +import org.labkey.api.assay.AssayProvider; +import org.labkey.api.assay.AssayService; +import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.SampleTimelineAuditEvent; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.NameGenerator; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpgradeCode; +import 
org.labkey.api.defaults.DefaultValueService; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.ExperimentRunType; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.DefaultExperimentDataHandler; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpLineageService; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolAttachmentType; +import org.labkey.api.exp.api.ExpRunAttachmentType; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentJSONConverter; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.FilterProtocolInputCriteria; +import org.labkey.api.exp.api.SampleTypeDomainKind; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DomainAuditProvider; +import org.labkey.api.exp.property.DomainPropertyAuditProvider; +import org.labkey.api.exp.property.ExperimentProperty; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.SystemProperty; +import org.labkey.api.exp.query.ExpDataClassTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.exp.xar.LSIDRelativizer; +import org.labkey.api.exp.xar.LsidUtils; +import org.labkey.api.files.FileContentService; +import org.labkey.api.files.TableUpdaterFileListener; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.ExperimentDeleteService; +import org.labkey.api.migration.MigrationTableHandler; +import org.labkey.api.module.ModuleContext; +import org.labkey.api.module.ModuleLoader; +import org.labkey.api.module.SpringModule; 
+import org.labkey.api.module.Summary; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.FilteredTable; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.UserSchema; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.roles.RoleManager; +import org.labkey.api.settings.AppProps; +import org.labkey.api.settings.OptionalFeatureService; +import org.labkey.api.usageMetrics.UsageMetricsService; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JspTestCase; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.SystemMaintenance; +import org.labkey.api.view.AlwaysAvailableWebPartFactory; +import org.labkey.api.view.BaseWebPartFactory; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.JspView; +import org.labkey.api.view.Portal; +import org.labkey.api.view.ViewContext; +import org.labkey.api.view.WebPartFactory; +import org.labkey.api.view.WebPartView; +import org.labkey.api.view.template.WarningService; +import org.labkey.api.vocabulary.security.DesignVocabularyPermission; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.api.webdav.WebdavService; +import org.labkey.api.writer.ContainerUser; +import org.labkey.experiment.api.DataClassDomainKind; +import org.labkey.experiment.api.ExpDataClassImpl; +import org.labkey.experiment.api.ExpDataClassTableImpl; +import org.labkey.experiment.api.ExpDataClassType; +import org.labkey.experiment.api.ExpDataImpl; +import org.labkey.experiment.api.ExpDataTableImpl; +import org.labkey.experiment.api.ExpMaterialImpl; +import org.labkey.experiment.api.ExpProtocolImpl; +import org.labkey.experiment.api.ExpSampleTypeImpl; +import 
org.labkey.experiment.api.ExpSampleTypeTableImpl; +import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.experiment.api.ExperimentStressTest; +import org.labkey.experiment.api.GraphAlgorithms; +import org.labkey.experiment.api.LineageTest; +import org.labkey.experiment.api.LogDataType; +import org.labkey.experiment.api.Protocol; +import org.labkey.experiment.api.SampleTypeServiceImpl; +import org.labkey.experiment.api.SampleTypeUpdateServiceDI; +import org.labkey.experiment.api.UniqueValueCounterTestCase; +import org.labkey.experiment.api.VocabularyDomainKind; +import org.labkey.experiment.api.data.ChildOfCompareType; +import org.labkey.experiment.api.data.ChildOfMethod; +import org.labkey.experiment.api.data.LineageCompareType; +import org.labkey.experiment.api.data.ParentOfCompareType; +import org.labkey.experiment.api.data.ParentOfMethod; +import org.labkey.experiment.api.property.DomainImpl; +import org.labkey.experiment.api.property.DomainPropertyImpl; +import org.labkey.experiment.api.property.LengthValidator; +import org.labkey.experiment.api.property.LookupValidator; +import org.labkey.experiment.api.property.PropertyServiceImpl; +import org.labkey.experiment.api.property.RangeValidator; +import org.labkey.experiment.api.property.RegExValidator; +import org.labkey.experiment.api.property.StorageNameGenerator; +import org.labkey.experiment.api.property.StorageProvisionerImpl; +import org.labkey.experiment.api.property.TextChoiceValidator; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.controllers.property.PropertyController; +import org.labkey.experiment.defaults.DefaultValueServiceImpl; +import org.labkey.experiment.lineage.ExpLineageServiceImpl; +import org.labkey.experiment.lineage.LineagePerfTest; +import org.labkey.experiment.pipeline.ExperimentPipelineProvider; +import org.labkey.experiment.pipeline.XarTestPipelineJob; +import org.labkey.experiment.samples.DataClassFolderImporter; 
+import org.labkey.experiment.samples.DataClassFolderWriter; +import org.labkey.experiment.samples.SampleStatusFolderImporter; +import org.labkey.experiment.samples.SampleTimelineAuditProvider; +import org.labkey.experiment.samples.SampleTypeFolderImporter; +import org.labkey.experiment.samples.SampleTypeFolderWriter; +import org.labkey.experiment.security.DataClassDesignerRole; +import org.labkey.experiment.security.SampleTypeDesignerRole; +import org.labkey.experiment.types.TypesController; +import org.labkey.experiment.xar.FolderXarImporterFactory; +import org.labkey.experiment.xar.FolderXarWriterFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; +import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; +import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; + +public class ExperimentModule extends SpringModule +{ + private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; + private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; + + public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; + public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; + public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; + public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; + + @Override + public String getName() + { + return MODULE_NAME; + } + + @Override + public Double getSchemaVersion() + { + return 26.005; + } + + @Nullable + @Override + public UpgradeCode 
getUpgradeCode() + { + return new ExperimentUpgradeCode(); + } + + @Override + protected void init() + { + addController("experiment", ExperimentController.class); + addController("experiment-types", TypesController.class); + addController("property", PropertyController.class); + ExperimentService.setInstance(new ExperimentServiceImpl()); + SampleTypeService.setInstance(new SampleTypeServiceImpl()); + DefaultValueService.setInstance(new DefaultValueServiceImpl()); + StorageProvisioner.setInstance(StorageProvisionerImpl.get()); + ExpLineageService.setInstance(new ExpLineageServiceImpl()); + + PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); + PropertyService.setInstance(propertyServiceImpl); + UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); + + UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); + + ExperimentProperty.register(); + SamplesSchema.register(this); + ExpSchema.register(this); + + PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); + PropertyService.get().registerDomainKind(new DataClassDomainKind()); + PropertyService.get().registerDomainKind(new VocabularyDomainKind()); + + QueryService.get().addCompareType(new ChildOfCompareType()); + QueryService.get().addCompareType(new ParentOfCompareType()); + QueryService.get().addCompareType(new LineageCompareType()); + QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); + QueryService.get().addQueryListener(new PropertyQueryChangeListener()); + + PropertyService.get().registerValidatorKind(new RegExValidator()); + PropertyService.get().registerValidatorKind(new RangeValidator()); + PropertyService.get().registerValidatorKind(new LookupValidator()); + 
PropertyService.get().registerValidatorKind(new LengthValidator()); + PropertyService.get().registerValidatorKind(new TextChoiceValidator()); + + ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); + ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); + ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); + ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); + ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", + "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); + if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) + { + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", + "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); + } + else + { + OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", + "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); + + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", + "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, 
"Allow multi-value Text Choice properties", + "Support selecting more than one value for text choice fields", false); + } + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", + "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", + "Support for querying lineage of experiment objects", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", + "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); + + RoleManager.registerPermission(new DesignVocabularyPermission(), true); + RoleManager.registerRole(new SampleTypeDesignerRole()); + RoleManager.registerRole(new DataClassDesignerRole()); + + AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); + AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); + + WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); + ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); + + addModuleProperty(new LineageMaximumDepthModuleProperty(this)); + WarningService.get().register(new ExperimentWarningProvider()); + } + + @Override + public boolean hasScripts() + { + return true; + } + + @Override + @NotNull + protected Collection createWebPartFactories() + { + List result = new ArrayList<>(); + + BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull 
ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); + } + }; + runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); + result.add(runGroupsFactory); + + BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunTypeWebPart(); + } + }; + result.add(runTypesFactory); + + result.add(new ExperimentRunWebPartFactory()); + BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); + result.add(sampleTypeFactory); + result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); + view.setTitle("Samples"); + return view; + } + }); + + result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); + } + }); 
+ + BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + narrowProtocolFactory.addLegacyNames("Narrow Protocols"); + result.add(narrowProtocolFactory); + + return result; + } + + private void addDataResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() + { + @Override + public WebdavResource resolve(@NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return data.createIndexDocument(null); + } + + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) + { + Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); + if (idDataMap == null) + return null; + + Map> searchJsonMap = new HashMap<>(); + for (String resourceIdentifier : idDataMap.keySet()) + searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); + return searchJsonMap; + } + }); + } + + private void addDataClassResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = 
NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); + if (dataClass == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); + + return properties; + } + }); + } + + private void addSampleTypeResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); + if (sampleType == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "sampleSet"); + + return properties; + } + }); + } + + private void addSampleResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); + if (material == null) + return null; + + return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) 
+ { + Set rowIds = new HashSet<>(); + Map rowIdIdentifierMap = new LongHashMap<>(); + for (String resourceIdentifier : resourceIdentifiers) + { + long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId != 0) + { + rowIds.add(rowId); + rowIdIdentifierMap.put(rowId, resourceIdentifier); + } + } + + Map> searchJsonMap = new HashMap<>(); + for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) + { + searchJsonMap.put( + rowIdIdentifierMap.get(material.getRowId()), + ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() + ); + } + + return searchJsonMap; + } + }); + } + + @Override + protected void startupAfterSpringConfig(ModuleContext moduleContext) + { + SearchService ss = SearchService.get(); +// ss.addSearchCategory(OntologyManager.conceptCategory); + ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); + ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); + ss.addSearchCategory(ExpMaterialImpl.searchCategory); + ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); + ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataImpl.expDataCategory); + ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); + ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); + addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); + addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); + addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); + addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); + addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); + 
addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); + ss.addDocumentProvider(ExperimentServiceImpl.get()); + + PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); + ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); + ExperimentService.get().registerDataType(new LogDataType()); + + AuditLogService.get().registerAuditType(new DomainAuditProvider()); + AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); + AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); + + FileContentService fileContentService = FileContentService.get(); + if (null != fileContentService) + { + fileContentService.addFileListener(new ExpDataFileListener()); + fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); + fileContentService.addFileListener(new FileLinkFileListener()); + } + ContainerManager.addContainerListener(new ContainerManager.ContainerListener() + { + @Override + public void containerDeleted(Container c, User user) + { + try + { + ExperimentService.get().deleteAllExpObjInContainer(c, user); + } + catch (ExperimentException ee) + { + throw new RuntimeException(ee); + } + } + }, + // This is in the Last group because when a container is deleted, + // the Experiment listener needs to be called after the Study listener, + // because Study needs the metadata held by Experiment to delete properly. 
+ // but it should be before the CoreContainerListener + ContainerManager.ContainerListener.Order.Last); + + if (ModuleLoader.getInstance().shouldInsertData()) + SystemProperty.registerProperties(); + + FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); + if (null != folderRegistry) + { + folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); + folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); + folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); + folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); + } + + AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); + + WebdavService.get().addProvider(new ScriptsResourceProvider()); + + SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); + + UsageMetricsService svc = UsageMetricsService.get(); + if (null != svc) + { + svc.registerUsageMetrics(getName(), () -> { + Map results = new HashMap<>(); + + DbSchema schema = ExperimentService.get().getSchema(); + if (AssayService.get() != null) + { + Map assayMetrics = new HashMap<>(); + SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); + SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); + for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) + { + Map protocolMetrics = new HashMap<>(); + + // Run count across all assay designs of this type + SQLFragment runSQL = new SQLFragment(baseRunSQL); + runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); + protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); + + // Number of assay designs of this type + SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); + protocolSQL.add(assayProvider.getProtocolPattern()); + protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); + List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); + protocolMetrics.put("protocolCount", protocols.size()); + + List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); + + protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); + + // Primary implementation class + protocolMetrics.put("implementingClass", assayProvider.getClass()); + + assayMetrics.put(assayProvider.getName(), protocolMetrics); + } + assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + + assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); + SQLFragment runsWithPlateSQL = new SQLFragment(""" + SELECT COUNT(*) FROM exp.experimentrun r + INNER JOIN exp.object o ON o.objectUri = r.lsid + INNER JOIN exp.objectproperty op ON op.objectId = o.objectId + WHERE op.propertyid IN ( + SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? + )"""); + assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); + assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); + + assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file + assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ + SELECT COUNT(*) FROM ( + SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data + WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( + SELECT rowid FROM exp.protocolapplication + WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') + ) + GROUP BY sourceapplicationid + ) x WHERE count > 1""").getObject(Long.class)); + + Map sampleLookupCountMetrics = new HashMap<>(); + SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); + + SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); + sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); + sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( + """ + SELECT COUNT(*) FROM ( + SELECT PD.domainid, COUNT(*) AS PropCount + FROM exp.propertydescriptor D + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) + AND propertyuri LIKE ? + GROUP BY PD.domainid + ) X WHERE X.PropCount > 1""" + ); + resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); + + assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); + + + // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) + results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assay", assayMetrics); + } + + results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); + results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); + + if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries + { + Collection> numSampleCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); + results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( + map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") + ).count()); + } + UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); + FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); + + SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + + " FROM (\n" + + " -- updates that are marked as lineage updates\n" + + " (SELECT DISTINCT transactionId\n" + + " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + + " AND comment = 'Sample was updated.'\n" + + " ) a1\n" + + " JOIN\n" + + " -- but have associated entries that are not lineage updates\n" + + " (SELECT DISTINCT transactionid\n" + + " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + + " ON a1.transactionid = a2.transactionid\n" + + " )"); + + results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); + + results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); + results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); + results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); + results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); + results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); + results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); + results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); + results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); + results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); + + results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + String duplicateCaseInsensitiveSampleNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.material + WHERE materialsourceid IS NOT NULL + GROUP BY LOWER(name), materialsourceid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + String duplicateCaseInsensitiveDataNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.data + WHERE classid IS NOT NULL + GROUP BY LOWER(name), classid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); + results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); + + results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); + results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); + results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + + "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); + if (schema.getSqlDialect().isPostgreSQL()) + { + Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); + results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> + (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); + } + + results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); + results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); + results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); + + results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); + results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + + results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); + + results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); + results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); + + results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); + results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); + + results.putAll(ExperimentService.get().getDomainMetrics()); + + return results; + }); + } + } + + @Override + public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) + { + ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); + service.registerSchemaHandler(handler); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); + DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); + service.registerSchemaHandler(dcHandler); + ExperimentDeleteService.setInstance(dcHandler); + } + + @Override + @NotNull + public Collection getSummary(Container c) + { + Collection list = new LinkedList<>(); + int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); + if (runGroupCount > 0) + list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); + + User user = HttpView.currentContext().getUser(); + + Set runTypes = ExperimentService.get().getExperimentRunTypes(c); + for (ExperimentRunType runType : runTypes) + { + if (runType == ExperimentRunType.ALL_RUNS_TYPE) + continue; + + long runCount = runType.getRunCount(user, c); + if (runCount > 0) + list.add(runCount + " runs of type " + runType.getDescription()); + } + + int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); + if (dataClassCount > 0) + list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); + + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if (sampleTypeCount > 0) + list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); + + return list; + } + + @Override + public @NotNull ArrayList getDetailedSummary(Container c, User user) + { + ArrayList summaries = new ArrayList<>(); + + // Assay types + long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); + if (assayTypeCount > 0) + summaries.add(new Summary(assayTypeCount, "Assay Type")); + + // Run count + int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); + if (runGroupCount > 0) + summaries.add(new Summary(runGroupCount, "Assay run")); + + // Number of Data Classes + List dataClasses = ExperimentService.get().getDataClasses(c, false); + int dataClassCount = dataClasses.size(); + if (dataClassCount > 0) + summaries.add(new Summary(dataClassCount, "Data Class")); + + ExpSchema expSchema = new ExpSchema(user, c); + + // Individual Data Class row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 47919: The "DataCount" column is filtered to only count data in the target container + if (table instanceof ExpDataClassTableImpl tableImpl) + tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpDataClassTable.Column.Name.name()); + columns.add(ExpDataClassTable.Column.DataCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + summaries.add(new Summary(count, entry.getKey())); + } + } + + // Sample Types + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if 
(sampleTypeCount > 0) + summaries.add(new Summary(sampleTypeCount, "Sample Type")); + + // Individual Sample Type row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 51557: The "SampleCount" column is filtered to only count data in the target container + if (table instanceof ExpSampleTypeTableImpl tableImpl) + tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpSampleTypeTable.Column.Name.name()); + columns.add(ExpSampleTypeTable.Column.SampleCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + { + String name = entry.getKey(); + Summary s = name.equals("MixtureBatches") + ? 
new Summary(count, "Batch") + : new Summary(count, name); + summaries.add(s); + } + } + } + + return summaries; + } + + @Override + public @NotNull Set> getIntegrationTests() + { + return Set.of( + DomainImpl.TestCase.class, + DomainPropertyImpl.TestCase.class, + ExpDataTableImpl.TestCase.class, + ExperimentServiceImpl.AuditDomainUriTest.class, + ExperimentServiceImpl.LineageQueryTestCase.class, + ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, + ExperimentServiceImpl.TestCase.class, + ExperimentStressTest.class, + LineagePerfTest.class, + LineageTest.class, + OntologyManager.TestCase.class, + PropertyServiceImpl.TestCase.class, + SampleTypeServiceImpl.TestCase.class, + StorageNameGenerator.TestCase.class, + StorageProvisionerImpl.TestCase.class, + UniqueValueCounterTestCase.class, + XarTestPipelineJob.TestCase.class + ); + } + + @Override + public @NotNull Collection>> getIntegrationTestFactories() + { + List>> list = new ArrayList<>(super.getIntegrationTestFactories()); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); + return list; + } + + @Override + public @NotNull Set> getUnitTests() + { + return Set.of( + GraphAlgorithms.TestCase.class, + LSIDRelativizer.TestCase.class, + Lsid.TestCase.class, + LsidUtils.TestCase.class, + PropertyController.TestCase.class, + Quantity.TestCase.class, + Unit.TestCase.class + ); + } + + @Override + @NotNull + public Collection getSchemaNames() + { + return List.of( + ExpSchema.SCHEMA_NAME, + DataClassDomainKind.PROVISIONED_SCHEMA_NAME, + SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME + ); + } + + @NotNull + @Override + public Collection getProvisionedSchemaNames() + { + return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); + } + + @Override + public JSONObject getPageContextJson(ContainerUser context) + { + JSONObject json = 
super.getPageContextJson(context); + json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); + return json; + } +} diff --git a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java index 7214de841e8..004be996f2e 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java @@ -1,985 +1,985 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Handler; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.XarFormatException; -import org.labkey.api.exp.XarSource; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.query.ExpDataClassDataTable; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.files.FileContentService; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineJob; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryRowReference; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.ValidationException; -import org.labkey.api.search.SearchResultTemplate; -import org.labkey.api.search.SearchScope; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DataClassReadPermission; -import org.labkey.api.security.permissions.DeletePermission; -import 
org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HtmlString; -import org.labkey.api.util.LinkBuilder; -import org.labkey.api.util.MimeMap; -import org.labkey.api.util.NetworkDrive; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.URLHelper; -import org.labkey.api.util.InputBuilder; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.NavTree; -import org.labkey.api.view.ViewContext; -import org.labkey.api.webdav.SimpleDocumentResource; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; - -public class ExpDataImpl extends AbstractRunItemImpl implements ExpData -{ - public enum DataOperations - { - Edit("editing", UpdatePermission.class), - EditLineage("editing lineage", UpdatePermission.class), - Delete("deleting", DeletePermission.class), - Move("moving", MoveEntitiesPermission.class); - - private final String _description; // used as a suffix in messaging users about what is not allowed - private final Class _permissionClass; 
- - DataOperations(String description, Class permissionClass) - { - _description = description; - _permissionClass = permissionClass; - } - - public String getDescription() - { - return _description; - } - - public Class getPermissionClass() - { - return _permissionClass; - } - } - - public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, DataClassReadPermission.class); - } - }; - public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, MediaReadPermission.class); - } - }; - - /** Cache this because it can be expensive to recompute */ - private Boolean _finalRunOutput; - - /** - * Temporary mapping until experiment.xml contains the mime type - */ - private static final MimeMap MIME_MAP = new MimeMap(); - - static public List fromDatas(List datas) - { - List ret = new ArrayList<>(datas.size()); - for (Data data : datas) - { - ret.add(new ExpDataImpl(data)); - } - return ret; - } - - // For serialization - protected ExpDataImpl() {} - - public ExpDataImpl(Data data) - { - super(data); - } - - @Override - public void setComment(User user, String comment) throws ValidationException - { - setComment(user, comment, true); - } - - @Override - public void setComment(User user, String comment, boolean index) throws ValidationException - { - super.setComment(user, comment); - - if (index) - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - @Nullable - public ActionURL detailsURL() - { - DataType dataType = getDataType(); - if (dataType != null) - { - ActionURL url = 
dataType.getDetailsURL(this); - if (url != null) - return url; - } - - return _object.detailsURL(); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference() - { - return getQueryRowReference(null); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) - { - ExpDataClassImpl dc = getDataClass(user); - if (dc != null) - return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - - // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace - DataType type = getDataType(); - if (type != null) - { - QueryRowReference queryRowReference = type.getQueryRowReference(this); - if (queryRowReference != null) - return queryRowReference; - } - - return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - } - - @Override - public List getTargetApplications() - { - return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); - } - - @Override - public List getTargetRuns() - { - return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); - } - - @Override - public DataType getDataType() - { - return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); - } - - @Override - public void setDataFileURI(URI uri) - { - ensureUnlocked(); - _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); - } - - @Override - public void save(User user) - { - // Replace the default "Data" cpastype if the Data belongs to a DataClass - ExpDataClassImpl dataClass = getDataClass(); - if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) - setCpasType(dataClass.getLSID()); - - boolean isNew = getRowId() == 0; - save(user, ExperimentServiceImpl.get().getTinfoData(), true); - - if (isNew) - { - if 
(dataClass != null) - { - Map map = new HashMap<>(); - map.put("lsid", getLSID()); - Table.insert(user, dataClass.getTinfo(), map); - } - } - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - protected void save(User user, TableInfo table, boolean ensureObject) - { - assert ensureObject; - super.save(user, table, true); - } - - @Override - public URI getDataFileURI() - { - String url = _object.getDataFileUrl(); - if (url == null) - return null; - try - { - return new URI(_object.getDataFileUrl()); - } - catch (URISyntaxException use) - { - return null; - } - } - - @Override - public ExperimentDataHandler findDataHandler() - { - return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); - } - - @Override - public String getDataFileUrl() - { - return _object.getDataFileUrl(); - } - - @Override - public boolean hasFileScheme() - { - return !FileUtil.hasCloudScheme(getDataFileUrl()); - } - - @Override - @Nullable - public File getFile() - { - return _object.getFile(); - } - - @Override - public @Nullable FileLike getFileLike() - { - return _object.getFileLike(); - } - - @Override - @Nullable - public java.nio.file.Path getFilePath() - { - return _object.getFilePath(); - } - - @Override - public boolean isInlineImage() - { - return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); - } - - @Override - public void delete(User user) - { - delete(user, true); - } - - @Override - public void delete(User user, boolean deleteRunsUsingData) - { - ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); - } - - public String getMimeType() - { - if (null != getDataFileUrl()) - return MIME_MAP.getContentTypeFor(getDataFileUrl()); - else - return null; - } - - @Override - public boolean isFileOnDisk() - { - java.nio.file.Path f = getFilePath(); - if (f != null) - if 
(!FileUtil.hasCloudScheme(f)) - return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); - else - return Files.exists(f); - else - return false; - } - - public boolean isPathAccessible() - { - java.nio.file.Path path = getFilePath(); - return (null != path && Files.exists(path)); - } - - @Override - public String getCpasType() - { - String result = _object.getCpasType(); - if (result != null) - return result; - - ExpDataClass dataClass = getDataClass(); - if (dataClass != null) - return dataClass.getLSID(); - - return ExpData.DEFAULT_CPAS_TYPE; - } - - public void setGenerated(boolean generated) - { - ensureUnlocked(); - _object.setGenerated(generated); - } - - @Override - public boolean isGenerated() - { - return _object.isGenerated(); - } - - @Override - public boolean isFinalRunOutput() - { - if (_finalRunOutput == null) - { - ExpRun run = getRun(); - _finalRunOutput = run != null && run.isFinalOutput(this); - } - return _finalRunOutput.booleanValue(); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass() - { - return getDataClass(null); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass(@Nullable User user) - { - if (_object.getClassId() != null && getContainer() != null) - { - if (user == null) - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); - else - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); - } - - return null; - } - - @Override - public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException - { - String dataFileURL = getDataFileUrl(); - if (dataFileURL == null) - return; - - if (xarSource.shouldIgnoreDataFiles()) - { - job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); - return; - } - - job.debug("Trying to load data file " + dataFileURL + " into the system"); - - java.nio.file.Path path = FileUtil.stringToPath(getContainer(), dataFileURL); - - if 
(!Files.exists(path)) - { - job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); - return; - } - - // Check that the file is under the pipeline root to prevent users from referencing a file that they - // don't have permission to import - PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); - if (!xarSource.allowImport(pr, job.getContainer(), path)) - { - if (pr == null) - { - job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); - return; - } - job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); - return; - } - - ExperimentDataHandler handler = findDataHandler(); - try - { - handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); - } - catch (ExperimentException e) - { - throw new XarFormatException(e); - } - - job.debug("Finished trying to load data file " + dataFileURL + " into the system"); - } - - // Get all text and int strings from the data class for indexing - private void getIndexValues( - Map props, - @NotNull ExpDataClassDataTableImpl table, - Set identifiersHi, - Set identifiersMed, - Set identifiersLo, - Set keywordHi, - Set keywordMed, - Set keywordsLo, - JSONObject jsonData - ) - { - CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); - for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) - skipColumns.add(column.name()); - skipColumns.add("Ancestors"); - skipColumns.add("Container"); - - processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); - } - - @Override - @NotNull - public Collection getAliases() - { - TableInfo mapTi = 
ExperimentService.get().getTinfoDataAliasMap(); - TableInfo ti = ExperimentService.get().getTinfoAlias(); - SQLFragment sql = new SQLFragment() - .append("SELECT a.name FROM ").append(mapTi, "m") - .append(" JOIN ").append(ti, "a") - .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); - sql.add(getLSID()); - ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); - return Collections.unmodifiableList(aliases); - } - - @Override - public String getDocumentId() - { - String dataClassName = "-"; - ExpDataClass dc = getDataClass(); - if (dc != null) - dataClassName = dc.getName(); - // why not just data:rowId? - return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); - } - - @Override - protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) - { - return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); - } - - @Override - public Map getObjectProperties() - { - return getObjectProperties(getDataClass()); - } - - @Override - public Map getObjectProperties(@Nullable User user) - { - return getObjectProperties(getDataClass(user)); - } - - private Map getObjectProperties(ExpDataClassImpl dataClass) - { - HashMap ret = new HashMap<>(super.getObjectProperties()); - var ti = null == dataClass ? 
null : dataClass.getTinfo(); - if (null != ti) - { - ret.putAll(getObjectProperties(ti)); - } - return ret; - } - - private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) - { - if (resourceIdentifier.startsWith("data:")) - resourceIdentifier = resourceIdentifier.substring("data:".length()); - - Path path = Path.parse(resourceIdentifier); - if (path.size() != 3) - return null; - String containerId = path.get(0); - String dataClassName = path.get(1); - String rowIdString = path.get(2); - - long rowId; - try - { - rowId = Long.parseLong(rowIdString); - if (rowId == 0) - return null; - } - catch (NumberFormatException ex) - { - return null; - } - - Container c = ContainerManager.getForId(containerId); - if (c == null) - return null; - - ExpDataClass dc = null; - if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) - { - String dcKey = containerId + '-' + dataClassName; - dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); - } - - return new Pair<>(rowId, dc); - } - - @Nullable - public static ExpDataImpl fromDocumentId(String resourceIdentifier) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); - if (rowIdDataClass == null) - return null; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - if (dc != null) - return ExperimentServiceImpl.get().getExpData(dc, rowId); - else - return ExperimentServiceImpl.get().getExpData(rowId); - } - - @Nullable - public static Map fromDocumentIds(Collection resourceIdentifiers) - { - Map rowIdIdentifierMap = new LongHashMap<>(); - Map dcCache = new HashMap<>(); - Map dcMap = new LongHashMap<>(); - Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass - List rowIds = new ArrayList<>(); // data rowIds without dataClass - for (String resourceIdentifier : resourceIdentifiers) - { - Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); - if (rowIdDataClass == null) - continue; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - rowIdIdentifierMap.put(rowId, resourceIdentifier); - - if (dc != null) - { - dcMap.put(dc.getRowId(), dc); - dcRowIdMap - .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) - .add(rowId); - } - else - rowIds.add(rowId); - } - - List expDatas = new ArrayList<>(); - if (!rowIds.isEmpty()) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); - - if (!dcRowIdMap.isEmpty()) - { - for (Long dataClassId : dcRowIdMap.keySet()) - { - ExpDataClass dc = dcMap.get(dataClassId); - if (dc != null) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); - } - } - - Map identifierDatas = new HashMap<>(); - for (ExpData data : expDatas) - { - identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); - } - - return identifierDatas; - } - - @Override - public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) - { - java.nio.file.Path path = getFilePath(); - if (path == null) - { - return null; - } - - Container c = getContainer(); - if (c == null) - { - return null; - } - - return FileContentService.get().getWebDavUrl(path, c, type); - } - - @Override - public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) - { - Container container = getContainer(); - if (container == null) - return null; - - Map props = new HashMap<>(); - JSONObject jsonData = new JSONObject(); - Set keywordsHi = new HashSet<>(); - Set keywordsMed = new HashSet<>(); - Set keywordsLo = new HashSet<>(); - - Set identifiersHi = new HashSet<>(); - Set identifiersMed = new HashSet<>(); - Set identifiersLo = new HashSet<>(); - - StringBuilder body = new StringBuilder(); - - // Name is an identifier with the highest weight - identifiersHi.add(getName()); - keywordsMed.add(getName()); // also add to keywords since 
those are stemmed - - // Description is added as a keywordsLo -- in Biologics it is common for the description to - // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as - // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. - // CONSIDER: tokenize the description and extract identifiers - if (null != getDescription()) - keywordsLo.add(getDescription()); - - String comment = getComment(); - if (comment != null) - keywordsMed.add(comment); - - // Add aliases in parentheses in the title - StringBuilder title = new StringBuilder(getName()); - Collection aliases = getAliases(); - if (!aliases.isEmpty()) - { - title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); - identifiersHi.addAll(aliases); - } - - ExpDataClassImpl dc = getDataClass(User.getSearchUser()); - if (dc != null) - { - ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); - NavTree t = new NavTree(dc.getName(), show); - String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); - props.put(SearchService.PROPERTY.navtrail.toString(), nav); - - props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); - body.append(dc.getName()); - - if (tableInfo == null) - tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); - - if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) - throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); - - if (!expDataClassDataTable.getDataClass().equals(dc)) - throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); - - // Collect other text columns and lookup display columns - getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); - } - - // === Stored, not indexed - if (dc != null && dc.isMedia()) - props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); - else - props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); - props.put(SearchService.PROPERTY.title.toString(), title.toString()); - props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); - - ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); - view.setExtraPath(container.getId()); - String docId = getDocumentId(); - - // Generate a summary explicitly instead of relying on a summary to be extracted - // from the document body. Placing lookup values and the description in the body - // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
- StringBuilder summary = new StringBuilder(); - if (StringUtils.isNotEmpty(getDescription())) - summary.append(getDescription()).append("\n"); - - appendTokens(summary, keywordsMed); - appendTokens(summary, identifiersMed); - appendTokens(summary, identifiersLo); - - props.put(SearchService.PROPERTY.summary.toString(), summary); - - return new ExpDataResource( - getRowId(), - new Path(docId), - docId, - container.getEntityId(), - "text/plain", - body.toString(), - view, - props, - getCreatedBy(), - getCreated(), - getModifiedBy(), - getModified() - ); - } - - private static void appendTokens(StringBuilder sb, Collection toks) - { - if (toks.isEmpty()) - return; - - sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); - } - - private static class ExpDataResource extends SimpleDocumentResource - { - final long _rowId; - - public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) - { - super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); - _rowId = rowId; - } - - @Override - public void setLastIndexed(long ms, long modified) - { - ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); - } - } - - public static class DataSearchResultTemplate implements SearchResultTemplate - { - public static final String NAME = "data"; - public static final String PROPERTY = "dataclass"; - - @Nullable - @Override - public String getName() - { - return NAME; - } - - private ExpDataClass getDataClass() - { - if (HttpView.hasCurrentView()) - { - ViewContext ctx = HttpView.currentContext(); - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); - } - return null; - } - - @Nullable - @Override - public String getCategories() - { - ExpDataClass dataClass = getDataClass(); - - if (dataClass != null && dataClass.isMedia()) - return expMediaDataCategory.getName(); - - return expDataCategory.getName(); - } - - @Nullable - @Override - public SearchScope getSearchScope() - { - return SearchScope.FolderAndSubfolders; - } - - @NotNull - @Override - public String getResultNameSingular() - { - ExpDataClass dc = getDataClass(); - if (dc != null) - return dc.getName(); - return "data"; - } - - @NotNull - @Override - public String getResultNamePlural() - { - return getResultNameSingular(); - } - - @Override - public boolean includeNavigationLinks() - { - return true; - } - - @Override - public boolean includeAdvanceUI() - { - return false; - } - - @Nullable - @Override - public HtmlString getExtraHtml(ViewContext ctx) - { - String q = ctx.getActionURL().getParameter("q"); - - if (StringUtils.isNotBlank(q)) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); - url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); - - StringBuilder html = new StringBuilder(); - html.append("
"); - - appendParam(html, null, dataclass, "All", false, url); - for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) - { - appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); - } - - html.append("
"); - return HtmlString.unsafe(html.toString()); - } - else - { - return null; - } - } - - private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) - { - sb.append(""); - - if (!Objects.equals(dataclass, current)) - { - if (addParam) - url = url.clone().addParameter(PROPERTY, dataclass); - - sb.append(LinkBuilder.simpleLink(label, url)); - } - else - { - sb.append(label); - } - - sb.append(" "); - } - - @Override - public HtmlString getHiddenInputsHtml(ViewContext ctx) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); - } - - return null; - } - - - @Override - public String reviseQuery(ViewContext ctx, String q) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - - if (null != dataclass) - return "+(" + q + ") +" + PROPERTY + ":" + dataclass; - else - return q; - } - - @Override - public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) - { - SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); - - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - String text = root.getText(); - root.setText(text + " - " + dataclass); - } - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.experiment.api; + +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Handler; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.XarFormatException; +import org.labkey.api.exp.XarSource; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataClassDataTable; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.files.FileContentService; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineJob; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryRowReference; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.ValidationException; +import org.labkey.api.search.SearchResultTemplate; +import org.labkey.api.search.SearchScope; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DataClassReadPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HtmlString; +import org.labkey.api.util.LinkBuilder; +import org.labkey.api.util.MimeMap; +import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.URLHelper; +import org.labkey.api.util.InputBuilder; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.NavTree; +import org.labkey.api.view.ViewContext; +import org.labkey.api.webdav.SimpleDocumentResource; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; + +public class ExpDataImpl extends AbstractRunItemImpl implements ExpData +{ + public enum DataOperations + { + Edit("editing", UpdatePermission.class), + EditLineage("editing lineage", UpdatePermission.class), + Delete("deleting", DeletePermission.class), + Move("moving", MoveEntitiesPermission.class); + + private final String _description; // used as a suffix in messaging users 
about what is not allowed + private final Class _permissionClass; + + DataOperations(String description, Class permissionClass) + { + _description = description; + _permissionClass = permissionClass; + } + + public String getDescription() + { + return _description; + } + + public Class getPermissionClass() + { + return _permissionClass; + } + } + + public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, DataClassReadPermission.class); + } + }; + public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, MediaReadPermission.class); + } + }; + + /** Cache this because it can be expensive to recompute */ + private Boolean _finalRunOutput; + + /** + * Temporary mapping until experiment.xml contains the mime type + */ + private static final MimeMap MIME_MAP = new MimeMap(); + + static public List fromDatas(List datas) + { + List ret = new ArrayList<>(datas.size()); + for (Data data : datas) + { + ret.add(new ExpDataImpl(data)); + } + return ret; + } + + // For serialization + protected ExpDataImpl() {} + + public ExpDataImpl(Data data) + { + super(data); + } + + @Override + public void setComment(User user, String comment) throws ValidationException + { + setComment(user, comment, true); + } + + @Override + public void setComment(User user, String comment, boolean index) throws ValidationException + { + super.setComment(user, comment); + + if (index) + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + @Nullable + public ActionURL detailsURL() + { + DataType dataType = 
getDataType(); + if (dataType != null) + { + ActionURL url = dataType.getDetailsURL(this); + if (url != null) + return url; + } + + return _object.detailsURL(); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference() + { + return getQueryRowReference(null); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) + { + ExpDataClassImpl dc = getDataClass(user); + if (dc != null) + return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + + // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace + DataType type = getDataType(); + if (type != null) + { + QueryRowReference queryRowReference = type.getQueryRowReference(this); + if (queryRowReference != null) + return queryRowReference; + } + + return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + } + + @Override + public List getTargetApplications() + { + return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); + } + + @Override + public List getTargetRuns() + { + return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); + } + + @Override + public DataType getDataType() + { + return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); + } + + @Override + public void setDataFileURI(URI uri) + { + ensureUnlocked(); + _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); + } + + @Override + public void save(User user) + { + // Replace the default "Data" cpastype if the Data belongs to a DataClass + ExpDataClassImpl dataClass = getDataClass(); + if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) + setCpasType(dataClass.getLSID()); + + boolean isNew = getRowId() == 0; + save(user, 
ExperimentServiceImpl.get().getTinfoData(), true); + + if (isNew) + { + if (dataClass != null) + { + Map map = new HashMap<>(); + map.put("lsid", getLSID()); + Table.insert(user, dataClass.getTinfo(), map); + } + } + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + protected void save(User user, TableInfo table, boolean ensureObject) + { + assert ensureObject; + super.save(user, table, true); + } + + @Override + public URI getDataFileURI() + { + String url = _object.getDataFileUrl(); + if (url == null) + return null; + try + { + return new URI(_object.getDataFileUrl()); + } + catch (URISyntaxException use) + { + return null; + } + } + + @Override + public ExperimentDataHandler findDataHandler() + { + return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); + } + + @Override + public String getDataFileUrl() + { + return _object.getDataFileUrl(); + } + + @Override + public boolean hasFileScheme() + { + return !FileUtil.hasCloudScheme(getDataFileUrl()); + } + + @Override + @Nullable + public File getFile() + { + return _object.getFile(); + } + + @Override + public @Nullable FileLike getFileLike() + { + return _object.getFileLike(); + } + + @Override + @Nullable + public java.nio.file.Path getFilePath() + { + return _object.getFilePath(); + } + + @Override + public boolean isInlineImage() + { + return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); + } + + @Override + public void delete(User user) + { + delete(user, true); + } + + @Override + public void delete(User user, boolean deleteRunsUsingData) + { + ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); + } + + public String getMimeType() + { + if (null != getDataFileUrl()) + return MIME_MAP.getContentTypeFor(getDataFileUrl()); + else + return null; + } + + @Override + public boolean isFileOnDisk() + { + 
java.nio.file.Path f = getFilePath(); + if (f != null) + if (!FileUtil.hasCloudScheme(f)) + return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); + else + return Files.exists(f); + else + return false; + } + + public boolean isPathAccessible() + { + java.nio.file.Path path = getFilePath(); + return (null != path && Files.exists(path)); + } + + @Override + public String getCpasType() + { + String result = _object.getCpasType(); + if (result != null) + return result; + + ExpDataClass dataClass = getDataClass(); + if (dataClass != null) + return dataClass.getLSID(); + + return ExpData.DEFAULT_CPAS_TYPE; + } + + public void setGenerated(boolean generated) + { + ensureUnlocked(); + _object.setGenerated(generated); + } + + @Override + public boolean isGenerated() + { + return _object.isGenerated(); + } + + @Override + public boolean isFinalRunOutput() + { + if (_finalRunOutput == null) + { + ExpRun run = getRun(); + _finalRunOutput = run != null && run.isFinalOutput(this); + } + return _finalRunOutput.booleanValue(); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass() + { + return getDataClass(null); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass(@Nullable User user) + { + if (_object.getClassId() != null && getContainer() != null) + { + if (user == null) + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); + else + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); + } + + return null; + } + + @Override + public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException + { + String dataFileURL = getDataFileUrl(); + if (dataFileURL == null) + return; + + if (xarSource.shouldIgnoreDataFiles()) + { + job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); + return; + } + + job.debug("Trying to load data file " + dataFileURL + " into the system"); + + java.nio.file.Path path = 
FileUtil.stringToPath(getContainer(), dataFileURL); + + if (!Files.exists(path)) + { + job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); + return; + } + + // Check that the file is under the pipeline root to prevent users from referencing a file that they + // don't have permission to import + PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); + if (!xarSource.allowImport(pr, job.getContainer(), path)) + { + if (pr == null) + { + job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); + return; + } + job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); + return; + } + + ExperimentDataHandler handler = findDataHandler(); + try + { + handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); + } + catch (ExperimentException e) + { + throw new XarFormatException(e); + } + + job.debug("Finished trying to load data file " + dataFileURL + " into the system"); + } + + // Get all text and int strings from the data class for indexing + private void getIndexValues( + Map props, + @NotNull ExpDataClassDataTableImpl table, + Set identifiersHi, + Set identifiersMed, + Set identifiersLo, + Set keywordHi, + Set keywordMed, + Set keywordsLo, + JSONObject jsonData + ) + { + CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); + for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) + skipColumns.add(column.name()); + skipColumns.add("Ancestors"); + skipColumns.add("Container"); + + processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); + } + + @Override + @NotNull + public 
Collection getAliases() + { + TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); + TableInfo ti = ExperimentService.get().getTinfoAlias(); + SQLFragment sql = new SQLFragment() + .append("SELECT a.name FROM ").append(mapTi, "m") + .append(" JOIN ").append(ti, "a") + .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); + sql.add(getLSID()); + ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); + return Collections.unmodifiableList(aliases); + } + + @Override + public String getDocumentId() + { + String dataClassName = "-"; + ExpDataClass dc = getDataClass(); + if (dc != null) + dataClassName = dc.getName(); + // why not just data:rowId? + return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); + } + + @Override + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); + } + + @Override + public Map getObjectProperties() + { + return getObjectProperties(getDataClass()); + } + + @Override + public Map getObjectProperties(@Nullable User user) + { + return getObjectProperties(getDataClass(user)); + } + + private Map getObjectProperties(ExpDataClassImpl dataClass) + { + HashMap ret = new HashMap<>(super.getObjectProperties()); + var ti = null == dataClass ? 
null : dataClass.getTinfo(); + if (null != ti) + { + ret.putAll(getObjectProperties(ti)); + } + return ret; + } + + private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) + { + if (resourceIdentifier.startsWith("data:")) + resourceIdentifier = resourceIdentifier.substring("data:".length()); + + Path path = Path.parse(resourceIdentifier); + if (path.size() != 3) + return null; + String containerId = path.get(0); + String dataClassName = path.get(1); + String rowIdString = path.get(2); + + long rowId; + try + { + rowId = Long.parseLong(rowIdString); + if (rowId == 0) + return null; + } + catch (NumberFormatException ex) + { + return null; + } + + Container c = ContainerManager.getForId(containerId); + if (c == null) + return null; + + ExpDataClass dc = null; + if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) + { + String dcKey = containerId + '-' + dataClassName; + dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); + } + + return new Pair<>(rowId, dc); + } + + @Nullable + public static ExpDataImpl fromDocumentId(String resourceIdentifier) + { + Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); + if (rowIdDataClass == null) + return null; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + if (dc != null) + return ExperimentServiceImpl.get().getExpData(dc, rowId); + else + return ExperimentServiceImpl.get().getExpData(rowId); + } + + @Nullable + public static Map fromDocumentIds(Collection resourceIdentifiers) + { + Map rowIdIdentifierMap = new LongHashMap<>(); + Map dcCache = new HashMap<>(); + Map dcMap = new LongHashMap<>(); + Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass + List rowIds = new ArrayList<>(); // data rowIds without dataClass + for (String resourceIdentifier : resourceIdentifiers) + { + Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); + if (rowIdDataClass == null) + continue; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + rowIdIdentifierMap.put(rowId, resourceIdentifier); + + if (dc != null) + { + dcMap.put(dc.getRowId(), dc); + dcRowIdMap + .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) + .add(rowId); + } + else + rowIds.add(rowId); + } + + List expDatas = new ArrayList<>(); + if (!rowIds.isEmpty()) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); + + if (!dcRowIdMap.isEmpty()) + { + for (Long dataClassId : dcRowIdMap.keySet()) + { + ExpDataClass dc = dcMap.get(dataClassId); + if (dc != null) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); + } + } + + Map identifierDatas = new HashMap<>(); + for (ExpData data : expDatas) + { + identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); + } + + return identifierDatas; + } + + @Override + public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) + { + java.nio.file.Path path = getFilePath(); + if (path == null) + { + return null; + } + + Container c = getContainer(); + if (c == null) + { + return null; + } + + return FileContentService.get().getWebDavUrl(path, c, type); + } + + @Override + public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) + { + Container container = getContainer(); + if (container == null) + return null; + + Map props = new HashMap<>(); + JSONObject jsonData = new JSONObject(); + Set keywordsHi = new HashSet<>(); + Set keywordsMed = new HashSet<>(); + Set keywordsLo = new HashSet<>(); + + Set identifiersHi = new HashSet<>(); + Set identifiersMed = new HashSet<>(); + Set identifiersLo = new HashSet<>(); + + StringBuilder body = new StringBuilder(); + + // Name is an identifier with the highest weight + identifiersHi.add(getName()); + keywordsMed.add(getName()); // also add to keywords since 
those are stemmed + + // Description is added as a keywordsLo -- in Biologics it is common for the description to + // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as + // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. + // CONSIDER: tokenize the description and extract identifiers + if (null != getDescription()) + keywordsLo.add(getDescription()); + + String comment = getComment(); + if (comment != null) + keywordsMed.add(comment); + + // Add aliases in parentheses in the title + StringBuilder title = new StringBuilder(getName()); + Collection aliases = getAliases(); + if (!aliases.isEmpty()) + { + title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); + identifiersHi.addAll(aliases); + } + + ExpDataClassImpl dc = getDataClass(User.getSearchUser()); + if (dc != null) + { + ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); + NavTree t = new NavTree(dc.getName(), show); + String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); + props.put(SearchService.PROPERTY.navtrail.toString(), nav); + + props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); + body.append(dc.getName()); + + if (tableInfo == null) + tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); + + if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) + throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); + + if (!expDataClassDataTable.getDataClass().equals(dc)) + throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); + + // Collect other text columns and lookup display columns + getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); + } + + // === Stored, not indexed + if (dc != null && dc.isMedia()) + props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); + else + props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); + props.put(SearchService.PROPERTY.title.toString(), title.toString()); + props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); + + ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); + view.setExtraPath(container.getId()); + String docId = getDocumentId(); + + // Generate a summary explicitly instead of relying on a summary to be extracted + // from the document body. Placing lookup values and the description in the body + // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
+ StringBuilder summary = new StringBuilder(); + if (StringUtils.isNotEmpty(getDescription())) + summary.append(getDescription()).append("\n"); + + appendTokens(summary, keywordsMed); + appendTokens(summary, identifiersMed); + appendTokens(summary, identifiersLo); + + props.put(SearchService.PROPERTY.summary.toString(), summary); + + return new ExpDataResource( + getRowId(), + new Path(docId), + docId, + container.getEntityId(), + "text/plain", + body.toString(), + view, + props, + getCreatedBy(), + getCreated(), + getModifiedBy(), + getModified() + ); + } + + private static void appendTokens(StringBuilder sb, Collection toks) + { + if (toks.isEmpty()) + return; + + sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); + } + + private static class ExpDataResource extends SimpleDocumentResource + { + final long _rowId; + + public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) + { + super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); + _rowId = rowId; + } + + @Override + public void setLastIndexed(long ms, long modified) + { + ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); + } + } + + public static class DataSearchResultTemplate implements SearchResultTemplate + { + public static final String NAME = "data"; + public static final String PROPERTY = "dataclass"; + + @Nullable + @Override + public String getName() + { + return NAME; + } + + private ExpDataClass getDataClass() + { + if (HttpView.hasCurrentView()) + { + ViewContext ctx = HttpView.currentContext(); + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); + } + return null; + } + + @Nullable + @Override + public String getCategories() + { + ExpDataClass dataClass = getDataClass(); + + if (dataClass != null && dataClass.isMedia()) + return expMediaDataCategory.getName(); + + return expDataCategory.getName(); + } + + @Nullable + @Override + public SearchScope getSearchScope() + { + return SearchScope.FolderAndSubfolders; + } + + @NotNull + @Override + public String getResultNameSingular() + { + ExpDataClass dc = getDataClass(); + if (dc != null) + return dc.getName(); + return "data"; + } + + @NotNull + @Override + public String getResultNamePlural() + { + return getResultNameSingular(); + } + + @Override + public boolean includeNavigationLinks() + { + return true; + } + + @Override + public boolean includeAdvanceUI() + { + return false; + } + + @Nullable + @Override + public HtmlString getExtraHtml(ViewContext ctx) + { + String q = ctx.getActionURL().getParameter("q"); + + if (StringUtils.isNotBlank(q)) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); + url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); + + StringBuilder html = new StringBuilder(); + html.append("
"); + + appendParam(html, null, dataclass, "All", false, url); + for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) + { + appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); + } + + html.append("
"); + return HtmlString.unsafe(html.toString()); + } + else + { + return null; + } + } + + private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) + { + sb.append(""); + + if (!Objects.equals(dataclass, current)) + { + if (addParam) + url = url.clone().addParameter(PROPERTY, dataclass); + + sb.append(LinkBuilder.simpleLink(label, url)); + } + else + { + sb.append(label); + } + + sb.append(" "); + } + + @Override + public HtmlString getHiddenInputsHtml(ViewContext ctx) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); + } + + return null; + } + + + @Override + public String reviseQuery(ViewContext ctx, String q) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + + if (null != dataclass) + return "+(" + q + ") +" + PROPERTY + ":" + dataclass; + else + return q; + } + + @Override + public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) + { + SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); + + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + String text = root.getText(); + root.setText(text + " - " + dataclass); + } + } + } +} From 4d9ff01456a44b2988aa6fc805f9cdca397588b0 Mon Sep 17 00:00:00 2001 From: XingY Date: Tue, 10 Mar 2026 20:10:27 -0700 Subject: [PATCH 4/7] fix build --- ...DataClassUpdateAddColumnsDataIterator.java | 167 ++++++++++++++++++ .../dataiterator/StatementDataIterator.java | 7 +- .../api/query/DefaultQueryUpdateService.java | 18 ++ .../api/ExpDataClassDataTableImpl.java | 20 ++- .../experiment/api/ExpDataClassType.java | 3 + .../api/SampleTypeUpdateServiceDI.java | 18 +- 6 files changed, 213 insertions(+), 20 deletions(-) create mode 100644 
api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java new file mode 100644 index 00000000000..277438cae6f --- /dev/null +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -0,0 +1,167 @@ +package org.labkey.api.dataiterator; + +import org.apache.commons.lang3.StringUtils; +import org.labkey.api.collections.IntHashMap; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.CompareType; +import org.labkey.api.data.Container; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.FieldKey; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import static org.labkey.api.exp.query.ExpDataTable.Column.LSID; +import static org.labkey.api.exp.query.ExpDataTable.Column.ClassId; +import static org.labkey.api.util.IntegerUtils.asInteger; + +/** + * DataIterator that adds the LSID column for DataClass update operations. + * Queries the LSID from exp.data based on the provided key (rowId or name) and dataClassId. + * The LSID is needed downstream for attachment handling. 
+ */ +public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator +{ + private final Container _targetContainer; + private final TableInfo _tableInfo; + final CachingDataIterator _unwrapped; + + private final long _dataClassId; + final int _lsidColIndex; + final ColumnInfo pkColumn; + final Supplier pkSupplier; + + int lastPrefetchRowNumber = -1; + final IntHashMap lsids = new IntHashMap<>(); + + public DataClassUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, Container container, long dataClassId, String keyColumnName) + { + super(in); + this._unwrapped = (CachingDataIterator)in; + _tableInfo = target; + _targetContainer = container; + _dataClassId = dataClassId; + + var map = DataIteratorUtil.createColumnNameMap(in); + + this._lsidColIndex = map.get(ExpDataTable.Column.LSID.name()); + + Integer index = map.get(keyColumnName); + ColumnInfo col = target.getColumn(keyColumnName); + if (null == index || null == col) + throw new IllegalArgumentException("Key column not found: " + keyColumnName); + pkSupplier = in.getSupplier(index); + pkColumn = col; + } + + @Override + public Supplier getSupplier(int i) + { + if (i != _lsidColIndex) + return _delegate.getSupplier(i); + return () -> get(i); + } + + @Override + public Object get(int i) + { + Integer rowNumber = asInteger(_delegate.get(0)); + + if (i == _lsidColIndex) + return lsids.get(rowNumber); + + return _delegate.get(i); + } + + @Override + public boolean isConstant(int i) + { + if (i != _lsidColIndex) + return _delegate.isConstant(i); + return false; + } + + @Override + public Object getConstantValue(int i) + { + if (i != _lsidColIndex) + return _delegate.getConstantValue(i); + return null; + } + + protected void prefetchExisting() throws BatchValidationException + { + Integer rowNumber = asInteger(_delegate.get(0)); + if (rowNumber <= lastPrefetchRowNumber) + return; + + lsids.clear(); + + int rowsToFetch = 50; + String keyFieldName = pkColumn.getName(); + boolean numericKey = 
pkColumn.isNumericType(); + JdbcType jdbcType = pkColumn.getJdbcType(); + Map rowKeyMap = new LinkedHashMap<>(); + Map keyRowMap = new LinkedHashMap<>(); + do + { + lastPrefetchRowNumber = asInteger(_delegate.get(0)); + Object keyObj = pkSupplier.get(); + Object key = jdbcType.convert(keyObj); + + if (numericKey) + { + if (null == key) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + } + else if (StringUtils.isEmpty((String) key)) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + + rowKeyMap.put(lastPrefetchRowNumber, key); + keyRowMap.put(key, lastPrefetchRowNumber); + lsids.put(lastPrefetchRowNumber, null); + } + while (--rowsToFetch > 0 && _delegate.next()); + + SimpleFilter filter = new SimpleFilter(ClassId.fieldKey(), _dataClassId); + filter.addCondition(pkColumn.getFieldKey(), rowKeyMap.values(), CompareType.IN); + filter.addCondition(FieldKey.fromParts("Container"), _targetContainer); + + Set columns = Sets.newCaseInsensitiveHashSet(keyFieldName, LSID.name()); + Map[] results = new TableSelector(ExperimentService.get().getTinfoData(), columns, filter, null).getMapArray(); + + for (Map result : results) + { + Object key = result.get(keyFieldName); + Object lsidObj = result.get(LSID.name()); + + Integer rowInd = keyRowMap.get(key); + if (lsidObj != null) + lsids.put(rowInd, (String) lsidObj); + } + + // backup to where we started so caller can iterate through them one at a time + _unwrapped.reset(); // unwrapped _delegate + _delegate.next(); + } + + @Override + public boolean next() throws BatchValidationException + { + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached + _unwrapped.mark(); // unwrapped _delegate + boolean ret = super.next(); + if (ret) + prefetchExisting(); + return ret; + } +} \ No newline at end of file diff --git a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java 
b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java index bf55b98b82c..4e4d9f11767 100644 --- a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java @@ -537,7 +537,12 @@ private void checkBackgroundException() throws BatchValidationException public Object get(int i) { if (null != _keyColumnInfo.get(i)) - return _keyValues.get(i); + { + Object value = _keyValues.get(i); + if (value != null) + return value; + } + return _data.get(i); } diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index d04ea279c65..79f64079d88 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -28,6 +28,7 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ConvertHelper; import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; import org.labkey.api.data.JdbcType; import org.labkey.api.data.MvUtil; import org.labkey.api.data.Parameter; @@ -933,4 +934,21 @@ protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIterator context.setCrossFolderImport(false); } } + + public static @Nullable String getKeyColumnAliasForUpdate(TableInfo tableInfo, @NotNull Map columnNameMap) + { + // Currently, SampleUpdateAddColumnsDataIterator and DataClassUpdateAddColumnsDataIterator is being called before a translator is invoked to + // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the + // map of columns for the key column. 
+ var rowIdAliases = ImportAliasable.Helper.createImportSet(tableInfo.getColumn(FieldKey.fromParts("RowId"))); + rowIdAliases.retainAll(columnNameMap.keySet()); + + if (rowIdAliases.size() == 1) + return rowIdAliases.iterator().next(); + if (rowIdAliases.isEmpty()) + return "Name"; + + return null; + } + } diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 2bffcbdb2b9..8926daf89b0 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -56,7 +56,9 @@ import org.labkey.api.data.UpdateableTableInfo; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.CachingDataIterator; import org.labkey.api.dataiterator.CoerceDataIterator; +import org.labkey.api.dataiterator.DataClassUpdateAddColumnsDataIterator; import org.labkey.api.dataiterator.DataIterator; import org.labkey.api.dataiterator.DataIteratorBuilder; import org.labkey.api.dataiterator.DataIteratorContext; @@ -152,11 +154,13 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.labkey.api.dataiterator.DataIteratorUtil.DUPLICATE_COLUMN_IN_DATA_ERROR; import static org.labkey.api.exp.api.ExpRunItem.PARENT_IMPORT_ALIAS_MAP_PROP; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.Name; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.QueryableInputs; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.RowId; import static org.labkey.api.exp.query.ExpMaterialTable.Column.LSID; +import static org.labkey.api.query.DefaultQueryUpdateService.getKeyColumnAliasForUpdate; import static org.labkey.experiment.ExpDataIterators.incrementCounts; public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl implements 
ExpDataClassDataTable @@ -1064,16 +1068,28 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) ColumnInfo cpasTypeCol = expData.getColumn("cpasType"); step0.addColumn(cpasTypeCol, new SimpleTranslator.ConstantColumn(_dataClass.getLSID())); + Map columnNameMap = DataIteratorUtil.createColumnNameMap(input); + if (context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE) { + String keyColumnAlias = getKeyColumnAliasForUpdate(expData, columnNameMap); + if (keyColumnAlias == null) + { + context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, ExpDataTable.Column.RowId.name()))); + return null; + } + step0.addNullColumn(Column.LSID.name(), JdbcType.VARCHAR); step0.selectAll(); - return LoggingDataIterator.wrap(step0.getDataIterator(context)); + + // add lsid column (for Attachment) but need to re-query it + var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), expData, c ,_dataClass.getRowId(), keyColumnAlias); + return LoggingDataIterator.wrap(added); } step0.selectAll(Sets.newCaseInsensitiveHashSet("lsid", "dataClass", "genId")); //TODO can this be moved up? 
// Ensure we have a name column -- makes the NameExpressionDataIterator easier - if (!DataIteratorUtil.createColumnNameMap(step0).containsKey("name")) + if (!columnNameMap.containsKey("name")) { ColumnInfo nameCol = expData.getColumn("name"); step0.addColumn(nameCol, (Supplier)() -> null); diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java index 749137c049e..877016f47b5 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java @@ -88,6 +88,9 @@ public static AttachmentParentType get() )) .append(" AS Description FROM expdataclass.") .append(domain.getStorageTableName()) + .append(" ds JOIN ") + .append(ExperimentService.get().getTinfoData()) + .append(" d ON d.rowId = ds.rowid") .append(" WHERE ").append(where) ); }); diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 9deaccfce62..7279bda6fa5 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -1329,7 +1329,7 @@ public DataIterator getDataIterator(DataIteratorContext context) addAliquotedFrom.addNullColumn(PARENT_RECOMPUTE_NAME_COL, JdbcType.VARCHAR); addAliquotedFrom.selectAll(); - String keyColumnAlias = getKeyColumnAlias(materialTable, columnNameMap); + String keyColumnAlias = getKeyColumnAliasForUpdate(materialTable, columnNameMap); if (keyColumnAlias == null) { context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, RowId.name()))); @@ -1379,22 +1379,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return LoggingDataIterator.wrap(names); } - private static @Nullable String getKeyColumnAlias(TableInfo materialTable, @NotNull Map columnNameMap) - { 
- // Currently, SampleUpdateAddColumnsDataIterator is being called before a translator is invoked to - // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the - // map of columns for the key column. - var rowIdAliases = ImportAliasable.Helper.createImportSet(materialTable.getColumn(RowId.fieldKey())); - rowIdAliases.retainAll(columnNameMap.keySet()); - - if (rowIdAliases.size() == 1) - return rowIdAliases.iterator().next(); - if (rowIdAliases.isEmpty()) - return Name.name(); - - return null; - } - private static boolean isReservedHeader(String name) { if (isNameHeader(name) || isDescriptionHeader(name) || isCommentHeader(name) || "CpasType".equalsIgnoreCase(name) || isAliasHeader(name)) From a68e8a818ff0fd8fc990155c459bc430bcceb707 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 13:15:20 -0700 Subject: [PATCH 5/7] fix merge --- .../labkey/api/dataiterator/ExistingRecordDataIterator.java | 2 ++ .../org/labkey/experiment/api/ExpDataClassDataTableImpl.java | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java index c39acf459a1..8816c3a6f55 100644 --- a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java @@ -153,6 +153,8 @@ public Supplier getSupplier(int i) @Override public Object get(int i) { + assert(i <= existingColIndex); + if (i)() -> null); } + if (context.getSelectIds() && !columnNameMap.containsKey(RowId.name())) + { + step0.addNullColumn(RowId.name(), JdbcType.INTEGER); + } + ColumnInfo lsidCol = expData.getColumn("lsid"); // TODO: validate dataFileUrl column, it will be saved later From 9d5aa75562e58c846936e6ec5db76692e3b4d3e5 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 13:19:24 -0700 Subject: [PATCH 6/7] fix merge --- 
.../org/labkey/experiment/api/ExpDataClassDataTableImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 7d55127fc28..3d61774d5ed 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -1095,7 +1095,7 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) step0.addColumn(nameCol, (Supplier)() -> null); } - if (context.getSelectIds() && !columnNameMap.containsKey(RowId.name())) + if (Boolean.TRUE.equals(context.getSelectIds()) && !columnNameMap.containsKey(RowId.name())) { step0.addNullColumn(RowId.name(), JdbcType.INTEGER); } From 48936604e982b69f636d5e6efe200a95cf1149a7 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 17:12:44 -0700 Subject: [PATCH 7/7] fix update --- ...DataClassUpdateAddColumnsDataIterator.java | 31 ++++++++++++++++--- .../test/integration/DataClassCrud.ispec.ts | 6 ++-- .../api/ExpDataClassDataTableImpl.java | 4 ++- 3 files changed, 32 insertions(+), 9 deletions(-) diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java index 277438cae6f..549825cc65a 100644 --- a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -1,6 +1,7 @@ package org.labkey.api.dataiterator; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; import org.labkey.api.collections.IntHashMap; import org.labkey.api.collections.Sets; import org.labkey.api.data.ColumnInfo; @@ -14,7 +15,9 @@ import org.labkey.api.exp.query.ExpDataTable; import org.labkey.api.query.BatchValidationException; import 
org.labkey.api.query.FieldKey; +import org.labkey.api.query.ValidationException; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; @@ -42,11 +45,13 @@ public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator int lastPrefetchRowNumber = -1; final IntHashMap lsids = new IntHashMap<>(); + final DataIteratorContext _context; - public DataClassUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, Container container, long dataClassId, String keyColumnName) + public DataClassUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataIteratorContext context, TableInfo target, Container container, long dataClassId, String keyColumnName) { super(in); this._unwrapped = (CachingDataIterator)in; + _context = context; _tableInfo = target; _targetContainer = container; _dataClassId = dataClassId; @@ -111,7 +116,9 @@ protected void prefetchExisting() throws BatchValidationException boolean numericKey = pkColumn.isNumericType(); JdbcType jdbcType = pkColumn.getJdbcType(); Map rowKeyMap = new LinkedHashMap<>(); - Map keyRowMap = new LinkedHashMap<>(); + Map> keyRowMap = new LinkedHashMap<>(); + Set notFoundKeys = new HashSet<>(); + do { lastPrefetchRowNumber = asInteger(_delegate.get(0)); @@ -127,7 +134,11 @@ else if (StringUtils.isEmpty((String) key)) throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); rowKeyMap.put(lastPrefetchRowNumber, key); - keyRowMap.put(key, lastPrefetchRowNumber); + notFoundKeys.add(key); + // if keyRowMap doesn't contain key, add new set, then add row number to set for this key + if (!keyRowMap.containsKey(key)) + keyRowMap.put(key, new HashSet<>()); + keyRowMap.get(key).add(lastPrefetchRowNumber); lsids.put(lastPrefetchRowNumber, null); } while (--rowsToFetch > 0 && _delegate.next()); @@ -144,11 +155,18 @@ else if (StringUtils.isEmpty((String) key)) Object key = result.get(keyFieldName); Object lsidObj = 
result.get(LSID.name()); - Integer rowInd = keyRowMap.get(key); + Set rowInds = keyRowMap.get(key); if (lsidObj != null) - lsids.put(rowInd, (String) lsidObj); + { + for (Integer rowInd : rowInds) + lsids.put(rowInd, (String) lsidObj); + notFoundKeys.remove(key); + } } + if (!notFoundKeys.isEmpty()) + _context.getErrors().addRowError(new ValidationException("Data not found for " + notFoundKeys)); + // backup to where we started so caller can iterate through them one at a time _unwrapped.reset(); // unwrapped _delegate _delegate.next(); @@ -157,6 +175,9 @@ else if (StringUtils.isEmpty((String) key)) @Override public boolean next() throws BatchValidationException { + if (_context.getErrors().hasErrors()) + return false; + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached _unwrapped.mark(); // unwrapped _delegate boolean ret = super.next(); diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index f84394137b7..e1c5d7dce95 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -297,11 +297,11 @@ describe('Import with update / merge', () => { // Issue 52922: Blank / bogus id in the file are getting ignored in update from file let blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", subfolder1Options, editorUserOptions); - 
expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", topFolderOptions, editorUserOptions); expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", subfolder1Options, editorUserOptions); diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 3d61774d5ed..4b7ae4a05a0 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -1082,7 +1082,7 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) step0.selectAll(); // add lsid column (for Attachment) but need to re-query it - var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), expData, c ,_dataClass.getRowId(), keyColumnAlias); + var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), context, expData, c ,_dataClass.getRowId(), keyColumnAlias); return LoggingDataIterator.wrap(added); } @@ -1498,6 +1498,8 @@ public void configureDataIteratorContext(DataIteratorContext context) 
context.putConfigParameter(QueryUpdateService.ConfigParameters.CheckForCrossProjectData, true); if (context.getInsertOption() == InsertOption.IMPORT || context.getInsertOption() == InsertOption.MERGE) context.setSelectIds(true); // select rowId because provisioned expdataclass.rowId and QueryUpdateAuditEvent.rowPk needs actual rowId + else if (context.getSelectIds() == null && context.getInsertOption() == InsertOption.UPDATE) + context.setSelectIds(false); // for update, don't add RowId if it wasn't in the input (without setSelectIds(false), rowId col will be added if table.hasTriggers by TableInsertUpdateDataIterator } @Override