diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java new file mode 100644 index 00000000000..549825cc65a --- /dev/null +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -0,0 +1,188 @@ +package org.labkey.api.dataiterator; + +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.labkey.api.collections.IntHashMap; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.CompareType; +import org.labkey.api.data.Container; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.ValidationException; + +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import static org.labkey.api.exp.query.ExpDataTable.Column.LSID; +import static org.labkey.api.exp.query.ExpDataTable.Column.ClassId; +import static org.labkey.api.util.IntegerUtils.asInteger; + +/** + * DataIterator that adds the LSID column for DataClass update operations. + * Queries the LSID from exp.data based on the provided key (rowId or name) and dataClassId. + * The LSID is needed downstream for attachment handling. 
+ */ +public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator +{ + private final Container _targetContainer; + private final TableInfo _tableInfo; + final CachingDataIterator _unwrapped; + + private final long _dataClassId; + final int _lsidColIndex; + final ColumnInfo pkColumn; + final Supplier pkSupplier; + + int lastPrefetchRowNumber = -1; + final IntHashMap lsids = new IntHashMap<>(); + final DataIteratorContext _context; + + public DataClassUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataIteratorContext context, TableInfo target, Container container, long dataClassId, String keyColumnName) + { + super(in); + this._unwrapped = (CachingDataIterator)in; + _context = context; + _tableInfo = target; + _targetContainer = container; + _dataClassId = dataClassId; + + var map = DataIteratorUtil.createColumnNameMap(in); + + this._lsidColIndex = map.get(ExpDataTable.Column.LSID.name()); + + Integer index = map.get(keyColumnName); + ColumnInfo col = target.getColumn(keyColumnName); + if (null == index || null == col) + throw new IllegalArgumentException("Key column not found: " + keyColumnName); + pkSupplier = in.getSupplier(index); + pkColumn = col; + } + + @Override + public Supplier getSupplier(int i) + { + if (i != _lsidColIndex) + return _delegate.getSupplier(i); + return () -> get(i); + } + + @Override + public Object get(int i) + { + Integer rowNumber = asInteger(_delegate.get(0)); + + if (i == _lsidColIndex) + return lsids.get(rowNumber); + + return _delegate.get(i); + } + + @Override + public boolean isConstant(int i) + { + if (i != _lsidColIndex) + return _delegate.isConstant(i); + return false; + } + + @Override + public Object getConstantValue(int i) + { + if (i != _lsidColIndex) + return _delegate.getConstantValue(i); + return null; + } + + protected void prefetchExisting() throws BatchValidationException + { + Integer rowNumber = asInteger(_delegate.get(0)); + if (rowNumber <= lastPrefetchRowNumber) + return; + + 
lsids.clear(); + + int rowsToFetch = 50; + String keyFieldName = pkColumn.getName(); + boolean numericKey = pkColumn.isNumericType(); + JdbcType jdbcType = pkColumn.getJdbcType(); + Map rowKeyMap = new LinkedHashMap<>(); + Map> keyRowMap = new LinkedHashMap<>(); + Set notFoundKeys = new HashSet<>(); + + do + { + lastPrefetchRowNumber = asInteger(_delegate.get(0)); + Object keyObj = pkSupplier.get(); + Object key = jdbcType.convert(keyObj); + + if (numericKey) + { + if (null == key) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + } + else if (StringUtils.isEmpty((String) key)) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + + rowKeyMap.put(lastPrefetchRowNumber, key); + notFoundKeys.add(key); + // if keyRowMap doesn't contain key, add new set, then add row number to set for this key + if (!keyRowMap.containsKey(key)) + keyRowMap.put(key, new HashSet<>()); + keyRowMap.get(key).add(lastPrefetchRowNumber); + lsids.put(lastPrefetchRowNumber, null); + } + while (--rowsToFetch > 0 && _delegate.next()); + + SimpleFilter filter = new SimpleFilter(ClassId.fieldKey(), _dataClassId); + filter.addCondition(pkColumn.getFieldKey(), rowKeyMap.values(), CompareType.IN); + filter.addCondition(FieldKey.fromParts("Container"), _targetContainer); + + Set columns = Sets.newCaseInsensitiveHashSet(keyFieldName, LSID.name()); + Map[] results = new TableSelector(ExperimentService.get().getTinfoData(), columns, filter, null).getMapArray(); + + for (Map result : results) + { + Object key = result.get(keyFieldName); + Object lsidObj = result.get(LSID.name()); + + Set rowInds = keyRowMap.get(key); + if (lsidObj != null) + { + for (Integer rowInd : rowInds) + lsids.put(rowInd, (String) lsidObj); + notFoundKeys.remove(key); + } + } + + if (!notFoundKeys.isEmpty()) + _context.getErrors().addRowError(new ValidationException("Data not found for " + notFoundKeys)); + + // backup 
to where we started so caller can iterate through them one at a time + _unwrapped.reset(); // unwrapped _delegate + _delegate.next(); + } + + @Override + public boolean next() throws BatchValidationException + { + if (_context.getErrors().hasErrors()) + return false; + + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached + _unwrapped.mark(); // unwrapped _delegate + boolean ret = super.next(); + if (ret) + prefetchExisting(); + return ret; + } +} \ No newline at end of file diff --git a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java index c39acf459a1..8816c3a6f55 100644 --- a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java @@ -153,6 +153,8 @@ public Supplier getSupplier(int i) @Override public Object get(int i) { + assert(i <= existingColIndex); + if (i { enum Column { - RowId, + Alias, + ContentLink, + ClassId, // database table only + CpasType, // database table only + Created, + CreatedBy, + DataClass, + DataFileUrl, + Description, + DownloadLink, + FileExtension, + FileExists, + FileSize, + Flag, + Folder, + Generated, + InlineThumbnail, + Inputs, + LastIndexed, LSID, + Modified, + ModifiedBy, Name, - Description, - DataClass, + ObjectId, // database table only + Outputs, + Properties, Protocol, - SourceProtocolApplication, - SourceApplicationInput, - DataFileUrl, ReferenceCount, Run, RunApplication, RunApplicationOutput, - Created, - CreatedBy, - Modified, - ModifiedBy, - Folder, - Flag, - Alias, - DownloadLink, - ContentLink, - ViewFileLink, + RunId, // database table only + RowId, + SourceApplicationId, // database table only + SourceApplicationInput, + SourceProtocolApplication, Thumbnail, - InlineThumbnail, - FileSize, - FileExists, - FileExtension, + ViewFileLink, ViewOrDownload, WebDavUrl, - WebDavUrlRelative, - Generated, - 
LastIndexed, - Inputs, - Outputs, - Properties; + WebDavUrlRelative; public FieldKey fieldKey() { diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 6aaa7f8e4bb..77bb1032fd1 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -16,6 +16,7 @@ package org.labkey.api.query; import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.jetbrains.annotations.NotNull; @@ -122,6 +123,8 @@ import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; import static org.labkey.api.files.FileContentService.UPLOADED_FILE; import static org.labkey.api.util.FileUtil.toFileForRead; import static org.labkey.api.util.FileUtil.toFileForWrite; @@ -453,6 +456,9 @@ protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> updateRows(User user, Container container, List return result; } + protected void validatePartitionedRowKeys(Collection columns) + { + // do nothing + } + + public List> updateRowsUsingPartitionedDIB( + DbScope.Transaction tx, + User user, + Container container, + List> rows, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) + { + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) + { + 
CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + + validatePartitionedRowKeys(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + + // we need to throw if we don't want executeWithRetry() attempt commit() + if (context.getErrors().hasErrors()) + throw new DbScope.RetryPassthroughException(context.getErrors()); + + if (subRet != null) + { + ret.addAll(subRet); + + // Check if duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? 
ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) + { + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); + } + + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); + + return ret; + } + + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException { if (pkVals == null) diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index 1299cea69d9..79f64079d88 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -28,6 +28,7 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ConvertHelper; import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; import org.labkey.api.data.JdbcType; import org.labkey.api.data.MvUtil; import org.labkey.api.data.Parameter; @@ -778,14 +779,9 @@ protected boolean validMissingValue(Container c, String mv) return _missingValues.containsKey(mv); } - protected TableInfo getTableInfoForConversion() - { - return 
getDbTable(); - } - final protected void convertTypes(User user, Container c, Map row) throws ValidationException { - convertTypes(user, c, row, getTableInfoForConversion(), null); + convertTypes(user, c, row, getDbTable(), null); } // TODO Path->FileObject @@ -938,4 +934,21 @@ protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIterator context.setCrossFolderImport(false); } } + + public static @Nullable String getKeyColumnAliasForUpdate(TableInfo tableInfo, @NotNull Map columnNameMap) + { + // Currently, SampleUpdateAddColumnsDataIterator and DataClassUpdateAddColumnsDataIterator is being called before a translator is invoked to + // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the + // map of columns for the key column. + var rowIdAliases = ImportAliasable.Helper.createImportSet(tableInfo.getColumn(FieldKey.fromParts("RowId"))); + rowIdAliases.retainAll(columnNameMap.keySet()); + + if (rowIdAliases.size() == 1) + return rowIdAliases.iterator().next(); + if (rowIdAliases.isEmpty()) + return "Name"; + + return null; + } + } diff --git a/experiment/resources/schemas/dbscripts/postgresql/exp-26.005-26.006.sql b/experiment/resources/schemas/dbscripts/postgresql/exp-26.005-26.006.sql new file mode 100644 index 00000000000..dd609550c06 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/postgresql/exp-26.005-26.006.sql @@ -0,0 +1 @@ +SELECT core.executeJavaUpgradeCode('dropProvisionedDataClassLsidColumn'); diff --git a/experiment/resources/schemas/dbscripts/sqlserver/exp-26.005-26.006.sql b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.005-26.006.sql new file mode 100644 index 00000000000..5f8b8c8ae76 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.005-26.006.sql @@ -0,0 +1 @@ +EXEC core.executeJavaUpgradeCode 'dropProvisionedDataClassLsidColumn'; diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts 
b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index 6f0d615f647..e1c5d7dce95 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -297,11 +297,11 @@ describe('Import with update / merge', () => { // Issue 52922: Blank / bogus id in the file are getting ignored in update from file let blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", subfolder1Options, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", topFolderOptions, editorUserOptions); expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", subfolder1Options, editorUserOptions); @@ -422,9 +422,7 @@ 
describe('Duplicate IDs', () => { }], 'exp.data', dataType, topFolderOptions, editorUserOptions); const data1RowId = caseInsensitive(dataRows[0], 'rowId'); - const data1Lsid = caseInsensitive(dataRows[0], 'lsid'); const data2RowId = caseInsensitive(dataRows[1], 'rowId'); - const data2Lsid = caseInsensitive(dataRows[1], 'lsid'); // update data2 twice using updateRows, using rowId await server.post('query', 'updateRows', { @@ -445,23 +443,42 @@ describe('Duplicate IDs', () => { expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2RowId); }); - // update data2 twice using updateRows, using lsid (data iterator) + // update data twice specifying the name across multiple partitions await server.post('query', 'updateRows', { schemaName: 'exp.data', queryName: dataType, rows: [{ description: 'update', - lsid: data1Lsid + name: dataName1 },{ description: 'update', - lsid: data2Lsid + rowId: data2RowId },{ description: 'update', - lsid: data2Lsid + name: dataName1 }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { errorResp = JSON.parse(result.text); - expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2Lsid); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); + }); + + // update data twice specifying the rowId across multiple partitions + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [{ + description: 'update', + rowId: data1RowId + },{ + description: 'update', + name: dataName2 + },{ + description: 'update', + rowId: data1RowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + data1RowId); + }); errorResp = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n" + dataName1 + "\tupdate\n" + dataName2 + "\tupdate\n" + dataName2 + "\tupdate", dataType, "UPDATE", topFolderOptions, 
editorUserOptions); @@ -821,6 +838,10 @@ describe('Multi Value Text Choice', () => { }); +const LSID_UPDATE_ERROR = 'LSID is no longer accepted as a key for data update. Specify a RowId or Name instead.'; +const LSID_MERGE_ERROR = 'LSID is no longer accepted as a key for data merge. Specify a RowId or Name instead.'; +const ROWID_MERGE_ERROR = 'RowId is not accepted when merging data. Specify only the data name instead.'; + describe('Data CRUD', () => { it ("Update using different key fields", async () => { @@ -838,20 +859,25 @@ describe('Data CRUD', () => { // insert 2 rows data, provide explicit names and a rowId = -1 const dataName1 = 'KeyData1'; const dataName2 = 'KeyData2'; + const dataName3 = 'KeyData3'; const inserted = await insertDataClassData([ { name: dataName1, description: 'original1', [fieldName]: 'val1', rowId: -1 }, { name: dataName2, description: 'original2', [fieldName]: 'val2', rowId: -1 }, + { name: dataName3, description: 'original3', [fieldName]: 'val3', rowId: -1 }, ], dataType, topFolderOptions); // verify both rows are inserted with correct name and rowId is not -1 for both rows, record the rowId and lsid for both rows expect(inserted[0].name).toBe(dataName1); expect(inserted[1].name).toBe(dataName2); + expect(inserted[2].name).toBe(dataName3); expect(inserted[0].rowId).not.toBe(-1); expect(inserted[1].rowId).not.toBe(-1); + expect(inserted[2].rowId).not.toBe(-1); const row1RowId = inserted[0].rowId; const row1Lsid = inserted[0].lsid; const row2RowId = inserted[1].rowId; const row2Lsid = inserted[1].lsid; + const row3RowId = inserted[2].rowId; const findRow = (rows: any[], rowId: number) => rows.find(r => caseInsensitive(r, 'RowId') === rowId); @@ -869,10 +895,32 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('updByRowId2'); expect(caseInsensitive(row2, fieldName)).toBe('rowIdVal2'); - // update description and fieldName value for both rows using lsid as key, verify update is successful and data are 
updated correctly + // Error when supplying LSID without RowId or Name + // query api + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [ + { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row2Lsid, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + ] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(LSID_UPDATE_ERROR); + }); + // update from import + let importUpdateText = 'LSID\tDescription\t' + fieldName + '\n' + row1Lsid + '\timportUpd1\timportLsidVal1\n' + row2Lsid + '\timportUpd2\timportLsidVal2'; + let errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_UPDATE_ERROR) > -1).toBeTruthy(); + + // merge from import + errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_MERGE_ERROR) > -1).toBeTruthy(); + + // update using lsid (correct and incorrect, should both be ignored), as well as rowId, as key, should succeed, verify update is successful and data are updated correctly await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, - { lsid: row2Lsid, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + { lsid: row1Lsid, rowId: row1RowId, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row1Lsid /*wrong lsid, should be ignored anyways*/, rowId: row2RowId, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); @@ -882,27 +930,48 @@ describe('Data CRUD', () => { 
expect(caseInsensitive(row1, fieldName)).toBe('lsidVal1'); expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('lsidVal2'); + expect(caseInsensitive(row2, 'lsid')).toBe(row2Lsid); // lsid should not be updated - // update description and fieldName value, one of the row use lsid as key, the other use rowId, verify update is successful and data are updated correctly + // update with different set of columns + // should use partitioned data iterator await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updMixed1', [fieldName]: 'mixedVal1' }, - { rowId: row2RowId, description: 'updMixed2', [fieldName]: 'mixedVal2' }, + { rowId: row1RowId, description: 'updMixed1', [fieldName]: 'mixedVal1' }, + { rowId: row2RowId, name: 'mixed_rename2', [fieldName]: 'mixedVal2' }, + { rowId: row3RowId, description: 'mixedVal3 desc' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); - rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId, row3RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); row1 = findRow(rows, row1RowId); row2 = findRow(rows, row2RowId); + var row3 = findRow(rows, row3RowId); expect(caseInsensitive(row1, 'description')).toBe('updMixed1'); expect(caseInsensitive(row1, fieldName)).toBe('mixedVal1'); - expect(caseInsensitive(row2, 'description')).toBe('updMixed2'); + expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('mixedVal2'); + expect(caseInsensitive(row2, 'name')).toBe('mixed_rename2'); + expect(caseInsensitive(row3, 'description')).toBe('mixedVal3 desc'); + expect(caseInsensitive(row3, fieldName)).toBe('val3'); // fieldName value should not be updated for row3 + + // update using name as key, should succeed, verify update is successful and 
data are updated correctly + await ExperimentCRUDUtils.updateRows(server, [ + { name: dataName1, description: 'updByName1', [fieldName]: 'nameVal1' }, + { name: 'mixed_rename2', description: 'updByName2', [fieldName]: 'nameVal2' }, + ], 'exp.data', dataType, topFolderOptions, editorUserOptions); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('updByName1'); + expect(caseInsensitive(row1, fieldName)).toBe('nameVal1'); + expect(caseInsensitive(row2, 'description')).toBe('updByName2'); + expect(caseInsensitive(row2, fieldName)).toBe('nameVal2'); - // update names of both rows using lsid as key, verify update is successful and names are updated correctly + // update names of both rows using lsid (ignored) and rowId as key, verify update is successful and names are updated correctly const newName1 = 'RenamedByLsid1'; const newName2 = 'RenamedByLsid2'; await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, name: newName1 }, - { lsid: row2Lsid, name: newName2 }, + { lsid: "BAD", rowId: row1RowId, name: newName1 }, + { lsid: row1Lsid /*wrong*/, rowId: row2RowId, name: newName2 }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, 'RowId,Name', topFolderOptions, adminOptions); @@ -911,7 +980,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row1, 'Name')).toBe(newName1); expect(caseInsensitive(row2, 'Name')).toBe(newName2); - // update names of both rows using rowId as key, verify update is successful and names are updated correctly + // update names of both rows using just rowId as key, verify update is successful and names are updated correctly const newName3 = 'RenamedByRowId1'; const newName4 = 'RenamedByRowId2'; await 
ExperimentCRUDUtils.updateRows(server, [ @@ -926,7 +995,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'Name')).toBe(newName4); // update description and fieldName value from Import with update, the import columns contains name field, verify update is successful and data are updated correctly - const importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; + importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; const updateResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); expect(updateResp.body.success).toBe(true); @@ -938,6 +1007,12 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('importUpd2'); expect(caseInsensitive(row2, fieldName)).toBe('importVal2'); + // Error when supplying RowId during MERGE, verify import fails + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tName\tDescription\n" + row3RowId + "\t" + dataName3 + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + // update description and fieldName value from Import with merge. at the same time create a new data. 
the import columns contain name field, verify update and insert is successful const newDataName = 'MergedNewData'; const importMergeText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\tmergeUpd1\tmergeVal1\n' + newName4 + '\tmergeUpd2\tmergeVal2\n' + newDataName + '\tmergeNew\tmergeNewVal'; @@ -957,6 +1032,60 @@ describe('Data CRUD', () => { expect(caseInsensitive(newDataRow, 'Name')).toBe(newDataName); expect(caseInsensitive(newDataRow, 'description')).toBe('mergeNew'); expect(caseInsensitive(newDataRow, fieldName)).toBe('mergeNewVal'); + + // Update from file, using rowId as key, verify update should be successful and data are updated correctly + const importUpdateRowIdText = 'RowId\tDescription\t' + fieldName + '\n' + row1RowId + '\timportUpdByRowId1\timportValByRowId1\n' + row2RowId + '\timportUpdByRowId2\timportValByRowId2'; + const updateByRowIdResp = await ExperimentCRUDUtils.importData(server, importUpdateRowIdText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1'); + expect(caseInsensitive(row1, fieldName)).toBe('importValByRowId1'); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2'); + expect(caseInsensitive(row2, fieldName)).toBe('importValByRowId2'); + + // update from file, provide rowId and an updated name, verify name is successfully updated + const newNameByRowId1 = 'RenamedByRowId1Import'; + const newNameByRowId2 = 'RenamedByRowId2Import'; + const importUpdateRowIdNameText = 'RowId\tName\tDescription\n' + row1RowId + '\t' + newNameByRowId1 + '\timportUpdByRowId1-2\n' + row2RowId + '\t' + newNameByRowId2 + '\timportUpdByRowId2-2\n'; + const updateByRowIdNameResp = await 
ExperimentCRUDUtils.importData(server, importUpdateRowIdNameText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdNameResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'Name')).toBe(newNameByRowId1); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1-2'); + expect(caseInsensitive(row2, 'Name')).toBe(newNameByRowId2); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2-2'); + + // verify data rowId needs to match provided dataclass type + const emptyDataClass = dataType + "Empty"; + await server.post('property', 'createDomain', { + kind: 'DataClass', + domainDesign: { name: emptyDataClass, fields: [{ name: fieldName }] }, + options: { name: dataType } + }, { ...topFolderOptions, ...designerReaderOptions }).expect(successfulResponse); + + // using query api, update using rowId for data that doesn't exist on the new dataclass should fail. + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: emptyDataClass, + rows: [{ + description: 'update', + rowId: row3RowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain('Data not found for [' + row3RowId + ']'); + }); + + // using update from file, verify update using rowId for data that doesn't exist on this dataclass should fail. 
+ errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", emptyDataClass, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain('Data not found for [' + row3RowId + ']'); + }); }); diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index b14dacc468e..68045b61013 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -870,9 +870,6 @@ public static void derive(User user, Container container, DataIterator di, boole ExpDataIterators.DerivationDataIteratorBuilder ddib = new ExpDataIterators.DerivationDataIteratorBuilder(di, container, user, isSample, dataType, skipAliquot, true); DataIteratorContext context = new DataIteratorContext(); context.setInsertOption(QueryUpdateService.InsertOption.UPDATE); - Map configParameters = new HashMap<>(); - configParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - context.setConfigParameters(configParameters); DataIterator derive = ddib.getDataIterator(context); new Pump(derive, context).run(); if (context.getErrors().hasErrors()) @@ -915,7 +912,7 @@ public DataIterator getDataIterator(DataIteratorContext context) else if (_isSample) di = new SampleUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); else - di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); + di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents);// return LoggingDataIterator.wrap(di); } @@ -1186,7 +1183,7 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String lsid = (String) get(_lsidCol); + String lsid = (String) get(_lsidCol); // why lsid?, insert or 
merge String name = null; if (_nameCol != null) name = (String) get(_nameCol); @@ -1449,9 +1446,10 @@ else if (o instanceof Number) private static class DataUpdateDerivationDataIterator extends DerivationDataIteratorBase { - // Map from Data name to Set of (parentColName, parentName) - final Map>> _parentNames; - final boolean _useLsid; + // Map from Data key (RowId or name) to Set of (parentColName, parentName) + final Map>> _parentNames; + final Integer _rowIdCol; + final boolean _useRowId; protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean checkRequiredParent) { @@ -1459,7 +1457,8 @@ protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext Map map = DataIteratorUtil.createColumnNameMap(di); _parentNames = new LinkedHashMap<>(); - _useLsid = map.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + _rowIdCol = map.getOrDefault(ExpDataTable.Column.RowId.name(), -1); + _useRowId = map.containsKey(ExpDataTable.Column.RowId.name()); } @Override @@ -1474,9 +1473,15 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String key = null; - if (_useLsid && _lsidCol != null) - key = (String) get(_lsidCol); + Object key = null; + if (_useRowId && _rowIdCol != null) + { + key = get(_rowIdCol); + if (key instanceof String k) + key = Long.parseLong(k); + else + key = asLong(key); + } else if (_nameCol != null) key = (String) get(_nameCol); @@ -1504,9 +1509,11 @@ else if (_nameCol != null) Map dataCache = new LongHashMap<>(); List runRecords = new ArrayList<>(); - for (String key : _parentNames.keySet()) + for (Object key : _parentNames.keySet()) { - ExpData expData = _useLsid ? ExperimentService.get().getExpData(key) : getDataClass().getData(_container, key); + ExpData expData = _useRowId + ? 
ExperimentService.get().getExpData((Long) key) + : getDataClass().getData(_container, (String) key); if (expData == null) continue; @@ -2428,15 +2435,8 @@ public DataIterator getDataIterator(DataIteratorContext context) } else { - if (isMergeOrUpdate) - { - boolean isUpdateUsingLsid = isUpdateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid && !canUpdateNames) - dontUpdate.add(ExpDataTable.Column.Name.name()); - } + if (isUpdateOnly && !canUpdateNames) + dontUpdate.add(ExpDataTable.Column.Name.name()); } // Since we support detailed audit logging, add the ExistingRecordDataIterator here just before TableInsertDataIterator. @@ -2700,29 +2700,20 @@ private MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorConte FieldKey dataKey; boolean isNumeric; - if (_isSamples) - { - var foundId = RowId.namesAndLabels().stream() - .filter(map::containsKey) - .findFirst(); + var foundId = RowId.namesAndLabels().stream() + .filter(map::containsKey) + .findFirst(); - if (foundId.isPresent()) - { - index = map.get(foundId.get()); - dataKey = RowId.fieldKey(); - isNumeric = true; - } - else - { - index = map.getOrDefault(Name.name(), -1); - dataKey = Name.fieldKey(); - isNumeric = false; - } + if (foundId.isPresent()) + { + index = map.get(foundId.get()); + dataKey = RowId.fieldKey(); + isNumeric = true; } else { - index = map.getOrDefault(ExpDataTable.Column.Name.name(), -1); - dataKey = ExpDataTable.Column.Name.fieldKey(); + index = map.getOrDefault(Name.name(), -1); + dataKey = Name.fieldKey(); isNumeric = false; } diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 5d452027778..19bc65ba945 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -205,7 +205,7 @@ 
public String getName() @Override public Double getSchemaVersion() { - return 26.005; + return 26.006; } @Nullable @@ -283,7 +283,7 @@ protected void init() "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", "Support for querying lineage of experiment objects", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(SampleTypeUpdateServiceDI.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE, "Allow RowId to be accepted when merging samples", + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); RoleManager.registerPermission(new DesignVocabularyPermission(), true); diff --git a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java index e693224a7ec..002e9dc2ff6 100644 --- a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java +++ b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java @@ -816,4 +816,115 @@ public static void shortenAllStorageNames(ModuleContext context) if (!badColumnNames.isEmpty()) LOG.error("Some storage column names are still too long!! {}", badColumnNames); } + + /** + * Called from exp-26.004-26.005.sql + * Drop the lsid column from existing provisioned DataClass tables. 
+ */ + @SuppressWarnings("unused") + @DeferredUpgrade + public static void dropProvisionedDataClassLsidColumn(ModuleContext context) + { + if (context.isNewInstall()) + return; + + try (DbScope.Transaction tx = ExperimentService.get().ensureTransaction()) + { + TableInfo source = ExperimentServiceImpl.get().getTinfoDataClass(); + List dataClasses = new TableSelector(source, null, null) + .stream(DataClass.class) + .map(ExpDataClassImpl::new) + .toList(); + + LOG.info("Dropping the lsid column from {} data classes", dataClasses.size()); + + int successCount = 0; + for (ExpDataClassImpl dc : dataClasses) + { + boolean success = dropDataClassLsid(dc); + if (success) + successCount++; + } + + LOG.info("Dropped lsid column from {} of {} data classes successfully.", successCount, dataClasses.size()); + + tx.commit(); + } + } + + private static boolean dropDataClassLsid(ExpDataClassImpl dc) + { + Domain domain = dc.getDomain(); + DataClassDomainKind kind = null; + try + { + kind = (DataClassDomainKind) domain.getDomainKind(); + } + catch (IllegalArgumentException e) + { + // pass + } + if (null == kind || null == kind.getStorageSchemaName()) + return false; + + DbSchema schema = DataClassDomainKind.getSchema(); + + StorageProvisioner.get().ensureStorageTable(domain, kind, schema.getScope()); + domain = PropertyService.get().getDomain(domain.getTypeId()); + assert (null != domain && null != domain.getStorageTableName()); + + SchemaTableInfo provisionedTable = schema.getTable(domain.getStorageTableName()); + if (provisionedTable == null) + { + LOG.error("DataClass '" + dc.getName() + "' (" + dc.getRowId() + ") has no provisioned table."); + return false; + } + + String lsidColumnName = "lsid"; + ColumnInfo lsidColumn = provisionedTable.getColumn(FieldKey.fromParts(lsidColumnName)); + if (lsidColumn == null) + { + LOG.info("No lsid column found on table '{}'. 
Skipping drop.", provisionedTable.getName()); + return false; + } + + Set indicesToRemove = new HashSet<>(); + for (var index : provisionedTable.getAllIndices()) + { + var indexColumns = index.columns(); + if (indexColumns.contains(lsidColumn)) + { + if (indexColumns.size() > 1) + LOG.info("Dropping index '{}' on table '{}' because it contains the lsid column.", index.name(), provisionedTable.getName()); + + indicesToRemove.add(index.name()); + } + } + + if (!indicesToRemove.isEmpty()) + StorageProvisionerImpl.get().dropTableIndices(domain, indicesToRemove); + else + LOG.info("No indices found on table '{}' that contain the lsid column.", provisionedTable.getName()); + + // Remanufacture a property descriptor that matches the original LSID property descriptor. + var spec = new PropertyStorageSpec(lsidColumnName, JdbcType.VARCHAR, 300).setNullable(false); + PropertyDescriptor pd = new PropertyDescriptor(); + pd.setContainer(dc.getContainer()); + pd.setDatabaseDefaultValue(spec.getDefaultValue()); + pd.setName(spec.getName()); + pd.setJdbcType(spec.getJdbcType(), spec.getSize()); + pd.setNullable(spec.isNullable()); + pd.setMvEnabled(spec.isMvEnabled()); + pd.setPropertyURI(DomainUtil.createUniquePropertyURI(domain.getTypeURI(), null, new CaseInsensitiveHashSet())); + pd.setDescription(spec.getDescription()); + pd.setImportAliases(spec.getImportAliases()); + pd.setScale(spec.getSize()); + DomainPropertyImpl dp = new DomainPropertyImpl((DomainImpl) domain, pd); + + LOG.debug("Dropping lsid column from table '{}' for data class '{}' in folder {}.", provisionedTable.getName(), dc.getName(), dc.getContainer().getPath()); + StorageProvisionerImpl.get().dropProperties(domain, Set.of(dp)); + + return true; + } + } diff --git a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java index 63155e503b2..c83453b6618 100644 --- a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java +++ 
b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java @@ -100,7 +100,6 @@ public class DataClassDomainKind extends AbstractDomainKind implements ExpDataClassDataTable @@ -167,8 +174,8 @@ public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl ALLOWED_IMPORT_HEADERS; static { DATA_CLASS_ALT_MERGE_KEYS = new HashSet<>(Arrays.asList(Column.ClassId.name(), Name.name())); - DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.LSID.name(), Column.RowId.name())); - ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("name", "description", "flag", "comment", "alias", "datafileurl")); + DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.RowId.name())); + ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("description", "flag", "comment", "alias", "datafileurl")); } private Map _vocabularyDomainProviders; @@ -689,7 +696,6 @@ public SQLFragment getFromSQLExpanded(String alias, Set selectedColumn // all columns from dataclass property table except key columns Set pCols = new CaseInsensitiveHashSet(provisioned.getColumnNameSet()); pCols.remove("name"); - pCols.remove("lsid"); pCols.remove("rowId"); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, pCols); @@ -837,12 +843,6 @@ protected SimpleFilter.FilterClause getContainerFilterClause(ContainerFilter fil // UpdatableTableInfo // - @Override - public @Nullable CaseInsensitiveHashSet skipProperties() - { - return super.skipProperties(); - } - @Nullable @Override public CaseInsensitiveHashMap remapSchemaColumns() @@ -859,8 +859,6 @@ public CaseInsensitiveHashMap remapSchemaColumns() @Override public @Nullable Set getAltMergeKeys(DataIteratorContext context) { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); return DATA_CLASS_ALT_MERGE_KEYS; } @@ -878,19 +876,23 @@ public Set getAltKeysForUpdate() if (context.getInsertOption().allowUpdate) { - 
boolean isUpdateUsingLsid = context.getInsertOption().updateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid) + if (context.getInsertOption().updateOnly) { - keyColumnNames.add(Column.LSID.name()); + // For UPDATE: prefer RowId, else require Name (with ClassId) + if (colNameMap.containsKey(Column.RowId.name())) + keyColumnNames.add(Column.RowId.name()); + else if (colNameMap.containsKey(Name.name())) + { + keyColumnNames.add(Column.ClassId.name()); + keyColumnNames.add(Name.name()); + } + else + throw new IllegalArgumentException("Either RowId or Name is required to update DataClass Data."); } else { - Set altMergeKeys = getAltMergeKeys(context); - if (altMergeKeys != null) - keyColumnNames.addAll(altMergeKeys); + // For MERGE: use merge keys (ClassId + Name) + keyColumnNames.addAll(DATA_CLASS_ALT_MERGE_KEYS); } } @@ -995,7 +997,11 @@ public DataIterator getDataIterator(DataIteratorContext context) if (null == input) return null; // Can happen if context has errors + boolean isMerge = context.getInsertOption() == QueryUpdateService.InsertOption.MERGE; + boolean isUpdate = context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE; + var drop = new CaseInsensitiveHashSet(); + var keysCheck = new CaseInsensitiveHashSet(); for (int i = 1; i <= input.getColumnCount(); i++) { String name = input.getColumnInfo(i).getName(); @@ -1003,18 +1009,44 @@ public DataIterator getDataIterator(DataIteratorContext context) boolean isContainerField = name.equalsIgnoreCase("Container") || name.equalsIgnoreCase("Folder"); if (isContainerField) { - if (context.getInsertOption().updateOnly || !context.isCrossFolderImport()) + if (isUpdate || !context.isCrossFolderImport()) drop.add(name); } + else if (ExpDataTable.Column.Name.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.Name.name()); + } else if (isReservedHeader(name)) + { 
+ if (ExpDataTable.Column.RowId.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.RowId.name()); + if (isUpdate) + continue; + + // While accepting RowId during merge is not our preferred behavior, we want to give users a way + // to opt-in to the old behavior where RowId is accepted and ignored. + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) + { + context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging data. Specify only the data name instead.", ExpMaterialTable.Column.RowId.name())); + return null; + } + } + + if (ExpDataTable.Column.LSID.name().equalsIgnoreCase(name)) + keysCheck.add(ExpDataTable.Column.LSID.name()); drop.add(name); + } + else if (Column.ClassId.name().equalsIgnoreCase(name)) drop.add(name); } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) + + if ((isMerge || isUpdate) && keysCheck.size() == 1 && keysCheck.contains(LSID.name())) { - drop.remove("lsid"); - drop.remove("rowid");// keep rowid for audit log + String message = String.format("LSID is no longer accepted as a key for data %s. Specify a RowId or Name instead.", isMerge ? 
"merge" : "update"); + context.getErrors().addRowError(new ValidationException(message, LSID.name())); + return null; } if (!drop.isEmpty()) @@ -1036,21 +1068,38 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) ColumnInfo cpasTypeCol = expData.getColumn("cpasType"); step0.addColumn(cpasTypeCol, new SimpleTranslator.ConstantColumn(_dataClass.getLSID())); + Map columnNameMap = DataIteratorUtil.createColumnNameMap(input); + if (context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE) { + String keyColumnAlias = getKeyColumnAliasForUpdate(expData, columnNameMap); + if (keyColumnAlias == null) + { + context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, ExpDataTable.Column.RowId.name()))); + return null; + } + step0.addNullColumn(Column.LSID.name(), JdbcType.VARCHAR); step0.selectAll(); - return LoggingDataIterator.wrap(step0.getDataIterator(context)); + + // add lsid column (for Attachment) but need to re-query it + var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), context, expData, c ,_dataClass.getRowId(), keyColumnAlias); + return LoggingDataIterator.wrap(added); } step0.selectAll(Sets.newCaseInsensitiveHashSet("lsid", "dataClass", "genId")); //TODO can this be moved up? 
// Ensure we have a name column -- makes the NameExpressionDataIterator easier - if (!DataIteratorUtil.createColumnNameMap(step0).containsKey("name")) + if (!columnNameMap.containsKey("name")) { ColumnInfo nameCol = expData.getColumn("name"); step0.addColumn(nameCol, (Supplier)() -> null); } + if (Boolean.TRUE.equals(context.getSelectIds()) && !columnNameMap.containsKey(RowId.name())) + { + step0.addNullColumn(RowId.name(), JdbcType.INTEGER); + } + ColumnInfo lsidCol = expData.getColumn("lsid"); // TODO: validate dataFileUrl column, it will be saved later @@ -1269,7 +1318,7 @@ public Map> getExistingRows(User user, Container co Set lsids = new HashSet<>(); for (Map dataRow : dataRows.values()) - lsids.add((String) dataRow.get("lsid")); + lsids.add((String) dataRow.get("lsid")); // ? List seeds = ExperimentServiceImpl.get().getExpDatasByLSID(lsids); ExperimentServiceImpl.get().addRowsParentsFields(new HashSet<>(seeds), dataRows, user, container); @@ -1381,196 +1430,35 @@ else if (name != null) @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _dataClass.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } - - return result; - - } - - // DataClassDataUpdateService needs to skip Attachment column convert before _update - // TODO: move override when implementing consolidating dataclass update methods - @Override - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path 
fileLinkDirPath) throws ValidationException { - if (PropertyType.ATTACHMENT == col.getPropertyType()) - return value; - - if (ALIAS_CONCEPT_URI.equals(col.getConceptURI())) - return value; - - return super.convertColumnValue(col, value, user, c, fileLinkDirPath); + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override - protected TableInfo getTableInfoForConversion() + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) { - // getDBTable() returns exp.data table, which lacks properties fields. - // TODO: this method can be removed when implementing consolidating dataclass update methods - return getQueryTable(); - } - - @Override - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException - { - // LSID was stripped by super.updateRows() and is needed to insert into the dataclass provisioned table - String lsid = (String)oldRow.get("lsid"); - if (lsid == null) - throw new ValidationException("lsid required to update row"); - - String newName = (String) row.get(Name.name()); - String oldName = (String) oldRow.get(Name.name()); - boolean hasNameChange = !StringUtils.isEmpty(newName) && !newName.equals(oldName); - - // Replace attachment columns with filename and keep AttachmentFiles - Map rowStripped = new CaseInsensitiveHashMap<>(); - Map attachments = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : row.entrySet()) - { - String name = entry.getKey(); - Object value = entry.getValue(); - if (isAttachmentProperty(name)) - { - if (value instanceof AttachmentFile file) - { - if (null != file.getFilename()) - { - rowStripped.put(name, file.getFilename()); - attachments.put(name, value); - } - } - else if (value != null && !StringUtils.isEmpty(String.valueOf(value))) - { - // Issue 53498: string value for attachment field is not allowed - throw new ValidationException("Cannot upload '" + value + "' to Attachment type field 
'" + name + "'."); - } - else - rowStripped.put(name, value); // if null or empty, remove attachment - } - else - { - rowStripped.put(name, value); - } - } - - for (String vocabularyDomainName : getVocabularyDomainProviders().keySet()) - { - DataClassVocabularyProviderProperties fieldVocabularyDomainProvider = getVocabularyDomainProviders().get(vocabularyDomainName); - if (fieldVocabularyDomainProvider != null) - rowStripped.putAll(fieldVocabularyDomainProvider.conceptURIVocabularyDomainProvider().getUpdateRowProperties(user, c, rowStripped, oldRow, getAttachmentParentFactory(), fieldVocabularyDomainProvider.sourceColumnName(), fieldVocabularyDomainProvider.vocabularyDomainName(), getVocabularyDomainProviders().size() > 1)); - } - - // update exp.data - Map ret = new CaseInsensitiveHashMap<>(super._update(user, c, rowStripped, oldRow, keys)); - - Integer rowId = (Integer) oldRow.get("RowId"); - if (rowId == null) - throw new ValidationException("RowId required to update row"); - keys = new Object[] {rowId}; - TableInfo t = _dataClassDataTableSupplier.get(); - if (t.getColumnNameSet().stream().anyMatch(rowStripped::containsKey)) - { - ret.putAll(Table.update(user, t, rowStripped, t.getColumn("rowId"), keys, null, Level.DEBUG)); - } - - ExpDataImpl data = null; - if (hasNameChange) - { - data = ExperimentServiceImpl.get().getExpData(lsid); - ExperimentService.get().addObjectLegacyName(data.getObjectId(), ExperimentServiceImpl.getNamespacePrefix(ExpData.class), oldName, user); - } - - // update comment - if (row.containsKey("flag") || row.containsKey("comment")) - { - Object o = row.containsKey("flag") ? 
row.get("flag") : row.get("comment"); - String flag = Objects.toString(o, null); - - if (data == null) - data = ExperimentServiceImpl.get().getExpData(lsid); - if (data != null) - data.setComment(user, flag); - } - - // update aliases - if (row.containsKey("Alias")) - AliasInsertHelper.handleInsertUpdate(getContainer(), user, lsid, ExperimentService.get().getTinfoDataAliasMap(), row.get("Alias")); - - // handle attachments - removePreviousAttachments(user, c, row, oldRow); - ret.putAll(attachments); - addAttachments(user, c, ret, lsid); - - // search index done in postcommit - - ret.put("RowId", oldRow.get("RowId")); // return rowId for SearchService - ret.put("lsid", lsid); - return ret; + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException { - boolean useDib = false; - if (rows != null && !rows.isEmpty() && oldKeys == null) - useDib = rows.get(0).containsKey("lsid"); - - useDib = useDib && hasUniformKeys(rows); + if (rows == null || rows.isEmpty()) + return Collections.emptyList(); List> results; - if (useDib) - { - Map finalConfigParameters = configParameters == null ? new HashMap<>() : configParameters; - finalConfigParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); + Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; + recordDataIteratorUsed(configParameters); - recordDataIteratorUsed(configParameters); - results = super._updateRowsUsingDIB(user, container, rows, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), extraScriptContext); + try + { + results = getSchema().getScope().executeWithRetry(tx -> + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); } - else + catch (DbScope.RetryPassthroughException retryException) { - results = super.updateRows(user, container, rows, oldKeys, errors, configParameters, extraScriptContext); - - DbScope scope = getUserSchema().getDbSchema().getScope(); - scope.addCommitTask(() -> - { - List orderedRowIds = new ArrayList<>(); - for (Map result : results) - { - Long rowId = MapUtils.getLong(result, RowId.name()); - if (rowId != null) - orderedRowIds.add(rowId); - } - Collections.sort(orderedRowIds); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - SearchService.get().defaultTask().getQueue(_dataClass.getContainer(), SearchService.PRIORITY.modified).addRunnable((q) -> - { - for (ExpDataImpl expData : ExperimentServiceImpl.get().getExpDatas(sublist)) - expData.index(q, null); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT); - - /* setup mini dataiterator pipeline to process lineage */ - DataIterator di = _toDataIteratorBuilder("updateRows.lineage", results).getDataIterator(new DataIteratorContext()); - ExpDataIterators.derive(user, container, di, false, _dataClass, true); + retryException.rethrow(BatchValidationException.class); + throw retryException.throwRuntimeException(); } return results; @@ -1597,58 +1485,12 @@ protected int truncateRows(User user, Container container) return ExperimentServiceImpl.get().truncateDataClass(_dataClass, user, container); } - private void removePreviousAttachments(User user, Container c, Map newRow, Map oldRow) - { - Lsid 
lsid = new Lsid((String)oldRow.get("LSID")); - - for (Map.Entry entry : newRow.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && oldRow.get(entry.getKey()) != null) - { - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - AttachmentService.get().deleteAttachment(parent, (String) oldRow.get(entry.getKey()), user); - } - } - } - @Override protected Domain getDomain() { return _dataClass.getDomain(); } - private void addAttachments(User user, Container c, Map row, String lsidStr) - { - if (row != null && lsidStr != null) - { - ArrayList attachmentFiles = new ArrayList<>(); - for (Map.Entry entry : row.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && entry.getValue() instanceof AttachmentFile file) - { - if (null != file.getFilename()) - attachmentFiles.add(file); - } - } - - if (!attachmentFiles.isEmpty()) - { - Lsid lsid = new Lsid(lsidStr); - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - try - { - AttachmentService.get().addAttachments(parent, attachmentFiles, user); - } - catch (IOException e) - { - throw UnexpectedException.wrap(e); - } - } - } - } - @Override public void configureDataIteratorContext(DataIteratorContext context) { @@ -1656,6 +1498,8 @@ public void configureDataIteratorContext(DataIteratorContext context) context.putConfigParameter(QueryUpdateService.ConfigParameters.CheckForCrossProjectData, true); if (context.getInsertOption() == InsertOption.IMPORT || context.getInsertOption() == InsertOption.MERGE) context.setSelectIds(true); // select rowId because provisioned expdataclass.rowId and QueryUpdateAuditEvent.rowPk needs actual rowId + else if (context.getSelectIds() == null && context.getInsertOption() == InsertOption.UPDATE) + context.setSelectIds(false); // for update, don't add RowId if it wasn't in the input (without setSelectIds(false), rowId col will be added if table.hasTriggers by TableInsertUpdateDataIterator } @Override diff --git 
a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java index 749137c049e..877016f47b5 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java @@ -88,6 +88,9 @@ public static AttachmentParentType get() )) .append(" AS Description FROM expdataclass.") .append(domain.getStorageTableName()) + .append(" ds JOIN ") + .append(ExperimentService.get().getTinfoData()) + .append(" d ON d.rowId = ds.rowid") .append(" WHERE ").append(where) ); }); diff --git a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java index c54d96aeec5..004be996f2e 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java @@ -29,6 +29,7 @@ import org.labkey.api.data.SqlSelector; import org.labkey.api.data.Table; import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; import org.labkey.api.exp.ExperimentDataHandler; import org.labkey.api.exp.ExperimentException; import org.labkey.api.exp.Handler; @@ -536,6 +537,12 @@ public String getDocumentId() return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); } + @Override + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); + } + @Override public Map getObjectProperties() { diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index bd8273f8d99..7279bda6fa5 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -161,8 +161,6 @@ public 
class SampleTypeUpdateServiceDI extends DefaultQueryUpdateService public static final String ROOT_RECOMPUTE_ROWID_SET = "RootIdToRecomputeSet"; public static final String PARENT_RECOMPUTE_NAME_SET = "ParentNameToRecomputeSet"; - public static final String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; - public static final Map SAMPLE_ALT_IMPORT_NAME_COLS; private static final Map ALIQUOT_ROLLUP_FIELDS = Map.of( @@ -533,66 +531,7 @@ public List> updateRows( try { results = getSchema().getDbSchema().getScope().executeWithRetry(tx -> - { - int index = 0; - int numPartitions = 0; - List> ret = new ArrayList<>(); - - Set observedRowIds = new HashSet<>(); - Set observedNames = new CaseInsensitiveHashSet(); - - while (index < rows.size()) - { - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - confirmAmountAndUnitsColumns(rowKeys); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - numPartitions++; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - - // skip audit summary for the partitions, we will perform it once at the end - context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); - - List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - // we need to throw if we don't want executeWithRetry() attempt commit() - if (context.getErrors().hasErrors()) - throw new DbScope.RetryPassthroughException(context.getErrors()); - - if (subRet != null) - { - ret.addAll(subRet); - - // Check if duplicate rows have been processed across the partitions - // Only start checking for duplicates after the first partition has been processed. 
- if (numPartitions > 1) - { - // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition - checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); - } - - if (errors.hasErrors()) - throw new DbScope.RetryPassthroughException(errors); - } - } - - if (numPartitions > 1) - { - var auditEvent = tx.getAuditEvent(); - if (auditEvent != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); - } - - _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), ret.size()); - - return ret; - }); + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); } catch (DbScope.RetryPassthroughException retryException) { @@ -609,85 +548,10 @@ public List> updateRows( return results; } - private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) - { - for (Map row : partitionRows) - { - Long rowId = MapUtils.getLong(row, RowId.name()); - if (rowId != null && !globalRowIds.add(rowId)) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); - return; - } - - Object nameObj = row.get(Name.name()); - if (nameObj != null && !globalNames.add(nameObj.toString())) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); - return; - } - } - } - - /** - * Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion - */ - @Deprecated @Override - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) + protected void validatePartitionedRowKeys(Collection columns) { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - Object unitsVal = 
null; - ColumnInfo unitsCol = null; - Object amountVal = null; - ColumnInfo amountCol = null; - if (row.containsKey(Units.name())) - { - unitsVal = row.get(Units.name()); - unitsCol = columnMap.get(Units.name()); - } - for (String colName : StoredAmount.namesAndLabels()) - { - if (row.containsKey(colName)) - { - amountVal = row.get(colName); - amountCol = columnMap.get(colName); - break; - } - } - if (amountVal != null) - { - String unitsStr = ""; - if (unitsVal != null) - unitsStr = " " + unitsVal; - - providedValues.put(PROVIDED_DATA_PREFIX + StoredAmount.label(), amountVal + unitsStr); - } - - Unit baseUnit = _sampleType != null ? _sampleType.getBaseUnit() : null; - - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - - Object value = entry.getValue(); - if (col != null && col == unitsCol) - { - value = _SamplesCoerceDataIterator.SampleUnitsConvertColumn.getValue(unitsVal, amountVal, amountCol != null, baseUnit, _sampleType == null ? null : _sampleType.getName()); - } - else if (col != null && col == amountCol) - { - value = _SamplesCoerceDataIterator.SampleAmountConvertColumn.getValue(amountVal, unitsCol != null, unitsVal, baseUnit, _sampleType == null ? 
null : _sampleType.getName()); - } - else - { - value = coerceTypesValue(col, providedValues, entry.getKey(), value); - } - - result.put(entry.getKey(), value); - } - return result; + confirmAmountAndUnitsColumns(columns); } @Override @@ -835,29 +699,9 @@ public static boolean isAliquotStatusChangeNeedRecalc(Collection available @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException { - if (row.containsKey(LSID.name()) && !(row.containsKey(RowId.name()) || row.containsKey(Name.name()))) - throw new ValidationException("Either RowId or Name is required to update a sample."); - - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _sampleType.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } + throw new UnsupportedOperationException("_update() is no longer supported for samples"); - return result; } @Override @@ -1447,7 +1291,7 @@ public DataIterator getDataIterator(DataIteratorContext context) // While accepting RowId during merge is not our preferred behavior, we want to give users a way // to opt-in to the old behavior where RowId is accepted and ignored. - if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE)) + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) { context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging samples. 
Specify only the sample name instead.", RowId.name())); return null; @@ -1466,8 +1310,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return null; } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - drop.remove(LSID.name()); if (!drop.isEmpty()) di = new DropColumnsDataIterator(di, drop); @@ -1487,7 +1329,7 @@ public DataIterator getDataIterator(DataIteratorContext context) addAliquotedFrom.addNullColumn(PARENT_RECOMPUTE_NAME_COL, JdbcType.VARCHAR); addAliquotedFrom.selectAll(); - String keyColumnAlias = getKeyColumnAlias(materialTable, columnNameMap); + String keyColumnAlias = getKeyColumnAliasForUpdate(materialTable, columnNameMap); if (keyColumnAlias == null) { context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, RowId.name()))); @@ -1537,22 +1379,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return LoggingDataIterator.wrap(names); } - private static @Nullable String getKeyColumnAlias(TableInfo materialTable, @NotNull Map columnNameMap) - { - // Currently, SampleUpdateAddColumnsDataIterator is being called before a translator is invoked to - // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the - // map of columns for the key column. 
- var rowIdAliases = ImportAliasable.Helper.createImportSet(materialTable.getColumn(RowId.fieldKey())); - rowIdAliases.retainAll(columnNameMap.keySet()); - - if (rowIdAliases.size() == 1) - return rowIdAliases.iterator().next(); - if (rowIdAliases.isEmpty()) - return Name.name(); - - return null; - } - private static boolean isReservedHeader(String name) { if (isNameHeader(name) || isDescriptionHeader(name) || isCommentHeader(name) || "CpasType".equalsIgnoreCase(name) || isAliasHeader(name)) @@ -1652,18 +1478,13 @@ static class _GenerateNamesDataIterator extends SimpleTranslator _nameState = sampleType.getNameGenState(skipDuplicateCheck, true, _container, user); lsidBuilder = sampleType.generateSampleLSID(); - boolean useLsidForUpdate = context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - if (useLsidForUpdate) - selectAll(CaseInsensitiveHashSet.of(Name.name(), RootMaterialRowId.name())); - else - selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); + selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); addColumn(new BaseColumnInfo(Name.fieldKey(), JdbcType.VARCHAR), (Supplier)() -> generatedName); - if (!useLsidForUpdate) - { - DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); - addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); - } + + DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); + addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + addColumn(new BaseColumnInfo(CpasType.fieldKey(), JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new 
SimpleTranslator.ConstantColumn(sampleType.getRowId())); } diff --git a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java index 0b446196ae0..d089d6c2bc9 100644 --- a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java @@ -309,7 +309,6 @@ protected void importTsvData(FolderImportContext ctx, XarContext xarContext, Str options.put(SampleTypeService.ConfigParameters.DeferAliquotRuns, true); if (isUpdate) options.put(QueryUpdateService.ConfigParameters.SkipRequiredFieldValidation, true); - options.put(ExperimentService.QueryOptions.UseLsidForUpdate, !isUpdate); options.put(ExperimentService.QueryOptions.DeferRequiredLineageValidation, true); context.setConfigParameters(options);