Merged
69 commits
7ecf40c
Initial removal
labkey-nicka Nov 19, 2025
d25ccfd
More removal
labkey-nicka Nov 20, 2025
8bbd758
Various updates
labkey-nicka Nov 20, 2025
7d2b57b
getObjectPropertiesSelector
labkey-nicka Nov 21, 2025
59354cb
nit
labkey-nicka Nov 21, 2025
32077be
Initial ExpDataIterators refactor
labkey-nicka Nov 21, 2025
f363b2c
Restore merge/update dynamic
labkey-nicka Nov 21, 2025
e14d648
Remove LSID support allowUpdate
labkey-nicka Nov 21, 2025
d72b527
Merge v Update Round 12373
labkey-nicka Nov 22, 2025
363ed9b
Resolve keys
labkey-nicka Nov 25, 2025
62043e1
Use getRows
labkey-nicka Nov 25, 2025
7167f3c
Bump @labkey packages
labkey-nicka Nov 25, 2025
ac7ba3a
ExistingRecordDataIterator: check for key columns rather than requiri…
labkey-nicka Nov 25, 2025
7ccfa37
comment
labkey-nicka Nov 25, 2025
fcd6963
Update validation
labkey-nicka Nov 26, 2025
4d842e9
Consistent keys for ExistingRecordDataIterator
labkey-nicka Nov 26, 2025
43b51ee
Revise TriggerDataBuilderHelper
labkey-nicka Nov 26, 2025
ac6fa88
logic
labkey-nicka Nov 26, 2025
5fc9dfc
SampleUpdateNamePolicyDataIterator
labkey-nicka Nov 26, 2025
9828b4b
Test updates
labkey-nicka Nov 27, 2025
ca2be6c
Vocab properties not supported via iterator
labkey-nicka Dec 1, 2025
7b53c5e
Support vocab prop update
labkey-nicka Dec 1, 2025
b35df9a
comments
labkey-nicka Dec 1, 2025
04e9bb1
Upgrade code
labkey-nicka Dec 1, 2025
d3be632
scripts
labkey-nicka Dec 2, 2025
ee42ee9
Bump @labkey packages
labkey-nicka Dec 2, 2025
ad59b89
Let it happen
labkey-nicka Dec 2, 2025
c391af5
Rename checks
labkey-nicka Dec 2, 2025
082ecb5
Support updating alias
labkey-nicka Dec 2, 2025
b7a3261
Match rename support
labkey-nicka Dec 2, 2025
1eb403a
Support external LSID supplier
labkey-nicka Dec 3, 2025
d53e9f6
Bump @labkey packages
labkey-nicka Dec 3, 2025
2beaf10
Address TODOs
labkey-nicka Dec 4, 2025
e0c4c18
Use TableSelector
labkey-nicka Dec 4, 2025
cbc078d
No longer accept LSID for update
labkey-nicka Dec 4, 2025
13a68d1
Add back property column
labkey-nicka Dec 5, 2025
309d2d3
Disallow rowId when merging
labkey-nicka Dec 5, 2025
ac7dd25
Handle "Row Id" labeling
labkey-nicka Dec 8, 2025
1418297
Sample: remove row-by-row update
labkey-nicka Dec 7, 2025
f678d15
New context
labkey-nicka Dec 8, 2025
8e14bb0
Clear ontology property cache with vocab changes
labkey-nicka Dec 8, 2025
59f9e27
No longer include LSID in reselected rows by default
labkey-nicka Dec 8, 2025
ad0d07a
Bump @labkey packages
labkey-nicka Dec 8, 2025
7b1d975
MissingRowIds
labkey-nicka Dec 9, 2025
23eec7c
Handle context errors
labkey-nicka Dec 9, 2025
7f89b63
Test updates
labkey-nicka Dec 9, 2025
467d383
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 9, 2025
a67b923
Bump @labkey packages
labkey-nicka Dec 9, 2025
7efc113
Bump @labkey packages
labkey-nicka Dec 9, 2025
89fc756
nits
labkey-nicka Dec 9, 2025
4c2f4e0
Support cross-folder import rowId
labkey-nicka Dec 10, 2025
f67b9a8
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 10, 2025
b604358
Bump @labkey packages
labkey-nicka Dec 10, 2025
0002384
Bump @labkey packages
labkey-nicka Dec 10, 2025
7c1354e
Convert key type
labkey-nicka Dec 10, 2025
db05b44
Do not update MaterialSourceId
labkey-nicka Dec 10, 2025
01a0376
fix
labkey-nicka Dec 10, 2025
46fe825
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 10, 2025
160471c
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 10, 2025
ac820de
Bump @labkey packages
labkey-nicka Dec 10, 2025
92332d9
Use query table only
labkey-nicka Dec 11, 2025
4c41fa1
Allow RowId during merge
labkey-nicka Dec 11, 2025
2f3da94
ExpMaterialImpl: set MaterialSourceId on insert
labkey-nicka Dec 12, 2025
ae75e1a
Summary audit event, DataIteratorPartitions
labkey-nicka Dec 12, 2025
243c9fb
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 12, 2025
8146976
Experimental feature, check for duplicates
labkey-nicka Dec 12, 2025
2c3d90a
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 12, 2025
42c8bc8
Merge branch 'develop' into fb_remove_sample_lsid
labkey-nicka Dec 13, 2025
b43d547
Bump @labkey packages
labkey-nicka Dec 13, 2025
5 changes: 2 additions & 3 deletions api/src/org/labkey/api/audit/SampleTimelineAuditEvent.java
@@ -15,8 +15,7 @@

import static org.labkey.api.audit.AuditHandler.DELTA_PROVIDED_DATA_PREFIX;
import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX;
import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount;
import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units;
import static org.labkey.api.exp.query.ExpMaterialTable.Column.*;

public class SampleTimelineAuditEvent extends DetailedAuditTypeEvent
{
@@ -26,7 +25,7 @@ public class SampleTimelineAuditEvent extends DetailedAuditTypeEvent
public static final String AMOUNT_AND_UNIT_UPGRADE_COMMENT = "Storage amount unit conversion to base unit during upgrade script.";

public static final Set<String> EXCLUDED_DETAIL_FIELDS = Set.of(
"AvailableAliquotVolume", "AvailableAliquotCount", "AliquotCount", "AliquotVolume", "AliquotUnit",
AvailableAliquotVolume.name(), AvailableAliquotCount.name(), AliquotCount.name(), AliquotVolume.name(), AliquotUnit.name(),
PROVIDED_DATA_PREFIX + StoredAmount.name(), PROVIDED_DATA_PREFIX + Units.name(),
DELTA_PROVIDED_DATA_PREFIX + StoredAmount.name(), DELTA_PROVIDED_DATA_PREFIX + Units.name());
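A note on the change above: referencing ExpMaterialTable.Column enum names instead of hard-coded strings keeps EXCLUDED_DETAIL_FIELDS aligned with the column definitions, since a renamed constant now fails to compile instead of leaving a stale string behind. A minimal standalone sketch of the pattern (the enum below is a stand-in, not the real ExpMaterialTable.Column):

import java.util.Set;

enum SampleColumnSketch { AliquotCount, AliquotVolume, StoredAmount, Units }

class ExcludedFieldsSketch
{
    // Enum .name() calls are checked at compile time; bare string literals are not.
    static final Set<String> EXCLUDED_DETAIL_FIELDS =
            Set.of(SampleColumnSketch.AliquotCount.name(), SampleColumnSketch.AliquotVolume.name());
}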

5 changes: 3 additions & 2 deletions api/src/org/labkey/api/audit/TransactionAuditProvider.java
@@ -133,6 +133,7 @@ public enum TransactionDetail
Action(false, "The controller-action for this request"),
AuditEvents(true, "The types of audit events generated during the transaction"),
ClientLibrary(false, "The client library (R, Python, etc) used to perform the action"),
DataIteratorPartitions(false, "The number of partitions rows were processed in via data iterator"),
DataIteratorUsed(false, "If data iterator was used for insert/update"),
EditMethod(false, "The method used to insert/update data from the app (e.g., 'DetailEdit', 'GridEdit', etc)"),
ETL(true, "The ETL process name involved in the transaction"),
@@ -159,7 +160,7 @@ public static TransactionDetail fromString(String key)
return null;
}

public static void addAuditDetails(@NotNull Map<TransactionAuditProvider.TransactionDetail, Object> transactionDetails, @NotNull Map<String, Object> auditDetails)
public static void addAuditDetails(@NotNull Map<TransactionAuditProvider.TransactionDetail, Object> transactionDetails, @NotNull Map<String, Object> auditDetails)
{
if (!auditDetails.isEmpty())
{
@@ -172,7 +173,7 @@ public static void addAuditDetails(@NotNull Map<TransactionAuditProvider.Transac
}
}

public static void addAuditDetails(@NotNull Map<TransactionAuditProvider.TransactionDetail, Object> transactionDetails, @NotNull String auditDetailsJson)
public static void addAuditDetails(@NotNull Map<TransactionAuditProvider.TransactionDetail, Object> transactionDetails, @NotNull String auditDetailsJson)
{
if (StringUtils.isEmpty(auditDetailsJson))
return;
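The new DataIteratorPartitions detail is folded into the transaction audit the same way as the existing details: string-keyed values are matched to typed TransactionDetail entries via fromString. Most of the addAuditDetails body is collapsed in this view, so the following is only a conceptual sketch of that fold, assuming fromString is reachable on the TransactionDetail enum as the hunk context above suggests and returns null for unrecognized keys:

for (Map.Entry<String, Object> entry : auditDetails.entrySet())
{
    // Unknown keys are skipped rather than failing the whole transaction audit.
    TransactionAuditProvider.TransactionDetail detail =
            TransactionAuditProvider.TransactionDetail.fromString(entry.getKey());
    if (detail != null)
        transactionDetails.put(detail, entry.getValue());
}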
4 changes: 2 additions & 2 deletions api/src/org/labkey/api/data/DataColumn.java
@@ -259,8 +259,8 @@ public void addQueryFieldKeys(Set<FieldKey> keys)
{
keys.add(_boundColumn.getFieldKey());
StringExpression effectiveURL = _boundColumn.getEffectiveURL();
if (effectiveURL instanceof DetailsURL)
keys.addAll(((DetailsURL) effectiveURL).getFieldKeys());
if (effectiveURL instanceof DetailsURL url)
keys.addAll(url.getFieldKeys());
}
if (_displayColumn != null)
keys.add(_displayColumn.getFieldKey());
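This change and the DisplayColumn change below are the same mechanical refactor: Java 16+ pattern matching for instanceof, where the pattern variable is bound in the condition and scoped to the branch where the test succeeds, so the explicit cast disappears. A generic illustration, unrelated to LabKey types:

Object value = "hello";
if (value instanceof String s)      // 's' is bound only when the test succeeds
    System.out.println(s.length()); // no separate cast needed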
27 changes: 7 additions & 20 deletions api/src/org/labkey/api/data/DisplayColumn.java
@@ -58,7 +58,6 @@
import java.text.DecimalFormatSymbols;
import java.text.Format;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
@@ -283,23 +282,14 @@ public void addQueryFieldKeys(Set<FieldKey> keys)
else if (null != _url)
se = StringExpressionFactory.createURL(_url);

if (se instanceof StringExpressionFactory.FieldKeyStringExpression)
{
Set<FieldKey> fields = ((StringExpressionFactory.FieldKeyStringExpression)se).getFieldKeys();
keys.addAll(fields);
}
if (se instanceof StringExpressionFactory.FieldKeyStringExpression expression)
keys.addAll(expression.getFieldKeys());

if (_urlTitle instanceof StringExpressionFactory.FieldKeyStringExpression)
{
Set<FieldKey> fields = ((StringExpressionFactory.FieldKeyStringExpression) _urlTitle).getFieldKeys();
keys.addAll(fields);
}
if (_urlTitle instanceof StringExpressionFactory.FieldKeyStringExpression expression)
keys.addAll(expression.getFieldKeys());

if (_textExpression instanceof StringExpressionFactory.FieldKeyStringExpression)
{
Set<FieldKey> fields = ((StringExpressionFactory.FieldKeyStringExpression) _textExpression).getFieldKeys();
keys.addAll(fields);
}
if (_textExpression instanceof StringExpressionFactory.FieldKeyStringExpression expression)
keys.addAll(expression.getFieldKeys());

_rowSpanner.addQueryColumns(keys);
}
@@ -351,13 +341,11 @@ public String getWidth()
return _width;
}


public void setNoWrap(boolean nowrap)
{
_nowrap = nowrap;
}


// Ideally, this would just set the string... and defer creation of the Format object until render time, when we would
// have a Container and other context. That would avoid creating multiple Formats per DisplayColumn.
@Override
@@ -369,7 +357,6 @@ public void setFormatString(String formatString)
_tsvFormat = createFormat(formatString, tsvFormatSymbols);
}


// java 7 changed to using infinity symbols for formatting, which is challenging for tsv import/export
// use old school "Infinity" for now
static public DecimalFormatSymbols tsvFormatSymbols = new DecimalFormatSymbols();
@@ -811,7 +798,7 @@ public void renderGridHeaderCell(RenderContext ctx, HtmlWriter out, String heade
if (style == null)
style = "";

// 34871: Support for column display width
// Issue 34871: Support for column display width
if (!isBlank(getWidth()))
style += "; width:" + getWidth() + "px;";

11 changes: 11 additions & 0 deletions api/src/org/labkey/api/data/validator/RequiredValidator.java
@@ -15,6 +15,7 @@
*/
package org.labkey.api.data.validator;

import org.jetbrains.annotations.Nullable;
import org.labkey.api.exp.MvFieldWrapper;

/**
@@ -26,12 +27,19 @@ public class RequiredValidator extends AbstractColumnValidator implements Unders
{
final boolean allowMV;
final boolean allowES;
final String _message;

public RequiredValidator(String columnName, boolean allowMissingValueIndicators, boolean allowEmptyString)
{
this(columnName, allowMissingValueIndicators, allowEmptyString, null);
}

public RequiredValidator(String columnName, boolean allowMissingValueIndicators, boolean allowEmptyString, @Nullable String message)
{
super(columnName);
allowMV = allowMissingValueIndicators;
allowES = allowEmptyString;
_message = message;
}

@Override
@@ -59,6 +67,9 @@ protected String _validate(int rowNum, Object value)
return null;
}

if (_message != null)
return _message;

// DatasetDefinition.importDatasetData:: errors.add("Row " + rowNumber + " does not contain required field " + col.getName() + ".");
// OntologyManager.insertTabDelimited:: throw new ValidationException("Missing value for required property " + col.getName());
return "Missing value for required property: " + _columnName;
@@ -202,8 +202,11 @@ public static DataIteratorBuilder getAttachmentDataIteratorBuilder(TableInfo ti,
throw new IllegalStateException("Originating data iterator is null");

DataIterator it = builder.getDataIterator(context);
if (it == null)
return null; // can happen if context has errors

Domain domain = ti.getDomain();
if(domain == null)
if (domain == null)
return it;

// find attachment columns
@@ -139,19 +139,23 @@ public static DataIteratorBuilder getDataIteratorBuilder(TableInfo queryTable, @
{
return context ->
{
DataIterator it = builder.getDataIterator(context);
if (it == null)
return null; // can happen if context has errors

AuditBehaviorType auditType = AuditBehaviorType.NONE;
if (queryTable.supportsAuditTracking())
auditType = queryTable.getEffectiveAuditBehavior((AuditBehaviorType) context.getConfigParameter(AuditConfigs.AuditBehavior));

// Detailed auditing and not set to bulk load in ETL
if (auditType == DETAILED && !context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad) && !context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.ByPassAudit))
{
DataIterator it = builder.getDataIterator(context);
DataIterator in = DataIteratorUtil.wrapMap(it, true);
return new DetailedAuditLogDataIterator(in, context, queryTable, insertOption.auditAction, user, container, extractProvidedValues);
}

// Nothing to do, so just return input DataIterator
return builder.getDataIterator(context);
return it;
};
}

@@ -168,5 +172,4 @@ public boolean supportsGetExistingRecord()
{
return _data.supportsGetExistingRecord();
}

}
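Two things change in the builder above: a null guard is added for contexts that already carry errors (the same guard added in getAttachmentDataIteratorBuilder), and builder.getDataIterator(context) is hoisted so the underlying iterator is constructed once instead of twice (previously once in the DETAILED branch and again in the fallthrough return). A skeleton of the resulting lambda, with the audit condition reduced to a placeholder:

return context ->
{
    DataIterator it = builder.getDataIterator(context);
    if (it == null)
        return null; // construction failed; the context holds the errors

    if (detailedAuditRequested) // placeholder for the DETAILED / not-bulk-load / not-bypass check
        return new DetailedAuditLogDataIterator(DataIteratorUtil.wrapMap(it, true),
                context, queryTable, insertOption.auditAction, user, container, extractProvidedValues);

    return it; // nothing to audit; hand back the input iterator unchanged
};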
@@ -32,7 +32,6 @@

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
@@ -95,15 +94,23 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator
var map = DataIteratorUtil.createColumnNameMap(in);
containerCol = map.get("Container");

Collection<String> keyNames = null==keys ? target.getPkColumnNames() : keys;
Set<String> keyNames = new CaseInsensitiveHashSet();
if (keys == null)
keyNames.addAll(target.getPkColumnNames());
else
keyNames.addAll(keys);

if (sharedKeys != null)
_sharedKeys.addAll(sharedKeys);

_dataColumnNames.addAll(detailed ? map.keySet() : keyNames);
if (detailed)
_dataColumnNames.addAll(map.keySet());

for (String name : keyNames)
{
if (!map.containsKey(name))
continue;

Integer index = map.get(name);
ColumnInfo col = target.getColumn(name);
if (null == index || null == col)
@@ -114,7 +121,11 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator
}
pkSuppliers.add(in.getSupplier(index));
pkColumns.add(col);
_dataColumnNames.add(name);
}

if (pkColumns.isEmpty())
throw new IllegalArgumentException("At least one key column is required.");
}

@Override
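The key handling above becomes more lenient: key names (the target's PK columns by default, or caller-supplied keys) are collected case-insensitively, keys absent from the incoming columns are skipped, and only an empty resolved key set is an error. A condensed sketch of that resolution, assuming the same CaseInsensitiveHashSet used in the diff (the branch for a key present in the input but missing from the target is collapsed above, so it is simply skipped here too):

Set<String> keyNames = new CaseInsensitiveHashSet();
keyNames.addAll(keys == null ? target.getPkColumnNames() : keys);

for (String name : keyNames)
{
    Integer index = map.get(name);           // input column name -> position
    ColumnInfo col = target.getColumn(name);
    if (index == null || col == null)
        continue;                            // tolerate keys this input does not carry
    pkSuppliers.add(in.getSupplier(index));
    pkColumns.add(col);
}

if (pkColumns.isEmpty())
    throw new IllegalArgumentException("At least one key column is required.");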
@@ -5,28 +5,25 @@
import org.labkey.api.collections.Sets;
import org.labkey.api.data.ColumnInfo;
import org.labkey.api.data.CompareType;
import org.labkey.api.data.JdbcType;
import org.labkey.api.data.SimpleFilter;
import org.labkey.api.data.TableInfo;
import org.labkey.api.data.TableSelector;
import org.labkey.api.exp.api.ExperimentService;
import org.labkey.api.query.BatchValidationException;
import org.labkey.api.query.FieldKey;
import org.labkey.api.util.StringUtilsLabKey;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

import static org.labkey.api.exp.query.ExpMaterialTable.Column.*;
import static org.labkey.api.util.IntegerUtils.asInteger;

public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator
{
public static final String ALIQUOTED_FROM_LSID_COLUMN_NAME = "AliquotedFromLSID";
public static final String ROOT_ROW_ID_COLUMN_NAME = "RootMaterialRowId";
public static final String CURRENT_SAMPLE_STATUS_COLUMN_NAME = "_CurrentSampleState_";
static final String KEY_COLUMN_NAME = "Name";
static final String KEY_COLUMN_LSID = "LSID";

final CachingDataIterator _unwrapped;
final TableInfo target;
@@ -36,35 +33,29 @@ public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator
final int _aliquotedFromColIndex;
final int _rootMaterialRowIdColIndex;
final int _currentSampleStateColIndex;
final boolean _useLsid;

// prefetch of existing records
int lastPrefetchRowNumber = -1;
final IntHashMap<String> aliquotParents = new IntHashMap<>();
final IntHashMap<Integer> aliquotRoots = new IntHashMap<>();
final IntHashMap<Integer> sampleState = new IntHashMap<>();

public SampleUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, long sampleTypeId, boolean useLsid)
public SampleUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, long sampleTypeId, String keyColumnName)
{
super(in);

this._unwrapped = (CachingDataIterator)in;

this.target = target;

this._sampleTypeId = sampleTypeId;
this._useLsid = useLsid;

var map = DataIteratorUtil.createColumnNameMap(in);
this._aliquotedFromColIndex = map.get(ALIQUOTED_FROM_LSID_COLUMN_NAME);
this._rootMaterialRowIdColIndex = map.get(ROOT_ROW_ID_COLUMN_NAME);
this._aliquotedFromColIndex = map.get(AliquotedFromLSID.name());
this._rootMaterialRowIdColIndex = map.get(RootMaterialRowId.name());
this._currentSampleStateColIndex = map.get(CURRENT_SAMPLE_STATUS_COLUMN_NAME);

String keyCol = useLsid ? KEY_COLUMN_LSID : KEY_COLUMN_NAME;
Integer index = map.get(keyCol);
ColumnInfo col = target.getColumn(keyCol);
Integer index = map.get(keyColumnName);
ColumnInfo col = target.getColumn(keyColumnName);
if (null == index || null == col)
throw new IllegalArgumentException("Key column not found: " + keyCol);
throw new IllegalArgumentException("Key column not found: " + keyColumnName);
pkSupplier = in.getSupplier(index);
pkColumn = col;
}
@@ -119,20 +110,24 @@ protected void prefetchExisting() throws BatchValidationException
sampleState.clear();

int rowsToFetch = 50;
Map<Integer, String> rowKeyMap = new LinkedHashMap<>();
Map<String, Integer> keyRowMap = new LinkedHashMap<>();
String keyFieldName = pkColumn.getName();
boolean numericKey = pkColumn.isNumericType();
JdbcType jdbcType = pkColumn.getJdbcType();
Map<Integer, Object> rowKeyMap = new LinkedHashMap<>();
Map<Object, Integer> keyRowMap = new LinkedHashMap<>();
do
{
lastPrefetchRowNumber = asInteger(_delegate.get(0));
Object keyObj = pkSupplier.get();
Object key = jdbcType.convert(keyObj);

String key = null;
if (keyObj instanceof String)
key = StringUtilsLabKey.unquoteString((String) keyObj);
else if (keyObj instanceof Number)
key = keyObj.toString();
if (StringUtils.isEmpty(key))
throw new IllegalArgumentException(KEY_COLUMN_NAME + " value not provided on row " + lastPrefetchRowNumber);
if (numericKey)
{
if (null == key)
throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber);
}
else if (StringUtils.isEmpty((String) key))
throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber);

rowKeyMap.put(lastPrefetchRowNumber, key);
keyRowMap.put(key, lastPrefetchRowNumber);
@@ -142,20 +137,19 @@ else if (keyObj instanceof Number)
}
while (--rowsToFetch > 0 && _delegate.next());

String keyCol = _useLsid ? KEY_COLUMN_LSID : KEY_COLUMN_NAME;
SimpleFilter filter = new SimpleFilter(FieldKey.fromParts("MaterialSourceId"), _sampleTypeId);
FieldKey keyField = FieldKey.fromParts(keyCol);
filter.addCondition(keyField, rowKeyMap.values(), CompareType.IN);
SimpleFilter filter = new SimpleFilter(MaterialSourceId.fieldKey(), _sampleTypeId);
filter.addCondition(pkColumn.getFieldKey(), rowKeyMap.values(), CompareType.IN);
filter.addCondition(FieldKey.fromParts("Container"), target.getUserSchema().getContainer());

Map<String, Object>[] results = new TableSelector(ExperimentService.get().getTinfoMaterial(), Sets.newCaseInsensitiveHashSet(keyCol, "aliquotedfromlsid", "rootMaterialRowId", "sampleState"), filter, null).getMapArray();
Set<String> columns = Sets.newCaseInsensitiveHashSet(keyFieldName, AliquotedFromLSID.name(), RootMaterialRowId.name(), SampleState.name());
Map<String, Object>[] results = new TableSelector(ExperimentService.get().getTinfoMaterial(), columns, filter, null).getMapArray();

for (Map<String, Object> result : results)
{
String key = (String) result.get(keyCol);
Object aliquotedFromLSIDObj = result.get("aliquotedFromLSID");
Object rootMaterialRowIdObj = result.get("rootMaterialRowId");
Object sampleStateObj = result.get("sampleState");
Object key = result.get(keyFieldName);
Object aliquotedFromLSIDObj = result.get(AliquotedFromLSID.name());
Object rootMaterialRowIdObj = result.get(RootMaterialRowId.name());
Object sampleStateObj = result.get(SampleState.name());
Integer rowInd = keyRowMap.get(key);
if (aliquotedFromLSIDObj != null)
aliquotParents.put(rowInd, (String) aliquotedFromLSIDObj);
@@ -180,5 +174,4 @@ public boolean next() throws BatchValidationException
prefetchExisting();
return ret;
}

}
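With LSID support removed, SampleUpdateAddColumnsDataIterator now takes the key column name directly and converts each incoming key value through the key column's JdbcType before validating it, so numeric RowId keys and string Name keys flow through the same prefetch path. A sketch of the constructor call and per-row key conversion, grounded in the signatures shown above ('wrapped', 'targetTable', and the "RowId" key name are placeholders for illustration):

// Assumed call site: 'wrapped' is the CachingDataIterator feeding the update.
DataIterator it = new SampleUpdateAddColumnsDataIterator(wrapped, targetTable, sampleTypeId, "RowId");

// Per-row key conversion, as in prefetchExisting():
JdbcType jdbcType = pkColumn.getJdbcType();
Object key = jdbcType.convert(pkSupplier.get());   // e.g. "12" -> 12 for an integer RowId
boolean missing = pkColumn.isNumericType() ? key == null : StringUtils.isEmpty((String) key);
if (missing)
    throw new IllegalArgumentException(pkColumn.getName() + " value not provided");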