82 changes: 71 additions & 11 deletions api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
@@ -93,6 +93,7 @@
import org.labkey.api.study.StudyService;
import org.labkey.api.study.assay.ParticipantVisitResolver;
import org.labkey.api.study.publish.StudyPublishService;
import org.labkey.api.util.IntegerUtils;
import org.labkey.api.util.PageFlowUtil;
import org.labkey.api.util.ResultSetUtil;
import org.labkey.api.util.UnexpectedException;
@@ -517,7 +518,7 @@ public boolean next() throws BatchValidationException
}
Map<ExpMaterial, String> rowBasedInputMaterials = new LinkedHashMap<>();

DataIterator fileData = checkData(container, user, dataTable, dataDomain, iter, settings, resolver, protocolInputMaterials, cf, rowBasedInputMaterials);
DataIterator fileData = checkData(container, user, provider, protocol, dataTable, dataDomain, iter, settings, resolver, protocolInputMaterials, cf, rowBasedInputMaterials);
fileData = convertPropertyNamesToURIs(fileData, dataDomain);

OntologyManager.RowCallback rowCallback = NO_OP_ROW_CALLBACK;
@@ -682,16 +683,18 @@ private void checkColumns(Domain dataDomain, DataIterator rawData) throws BatchV
* @param rowBasedInputMaterials the map of materials that are inputs to this run based on the data rows
*/
private DataIterator checkData(
Container container,
User user,
TableInfo dataTable,
Domain dataDomain,
DataIterator rawData,
DataLoaderSettings settings,
ParticipantVisitResolver resolver,
Map<String, ExpMaterial> inputMaterials,
ContainerFilter containerFilter,
Map<ExpMaterial, String> rowBasedInputMaterials
Container container,
User user,
AssayProvider provider,
ExpProtocol protocol,
TableInfo dataTable,
Domain dataDomain,
DataIterator rawData,
DataLoaderSettings settings,
ParticipantVisitResolver resolver,
Map<String, ExpMaterial> inputMaterials,
ContainerFilter containerFilter,
Map<ExpMaterial, String> rowBasedInputMaterials
) throws BatchValidationException
{
final ExperimentService exp = ExperimentService.get();
@@ -705,10 +708,14 @@ private DataIterator checkData(
DomainProperty visitPropFinder = null;
DomainProperty datePropFinder = null;
DomainProperty targetStudyPropFinder = null;
DomainProperty platePropFinder = null;
DomainProperty wellLocationPropFinder = null;
DomainProperty wellLsidPropFinder = null;

RemapCache cache = new RemapCache();
Map<DomainProperty, TableInfo> remappableLookup = new HashMap<>();
Map<Long, ExpMaterial> materialCache = new LongHashMap<>();
Map<Long, Map<String, Long>> plateWellCache = new LongHashMap<>();

Map<DomainProperty, ExpSampleType> lookupToSampleTypeByName = new HashMap<>();
Map<DomainProperty, ExpSampleType> lookupToSampleTypeById = new HashMap<>();
@@ -717,6 +724,7 @@

List<? extends DomainProperty> columns = dataDomain.getProperties();
Map<DomainProperty, List<ColumnValidator>> validatorMap = new HashMap<>();
boolean isPlateMetadataEnabled = provider.isPlateMetadataEnabled(protocol);

for (DomainProperty pd : columns)
{
@@ -748,6 +756,25 @@ else if (pd.getName().equalsIgnoreCase(AbstractAssayProvider.TARGET_STUDY_PROPER
{
targetStudyPropFinder = pd;
}
else if (isPlateMetadataEnabled &&
pd.getName().equalsIgnoreCase("WellLocation") &&
pd.getPropertyDescriptor().getPropertyType() == PropertyType.STRING)
{
wellLocationPropFinder = pd;
}
else if (isPlateMetadataEnabled &&
pd.getName().equalsIgnoreCase("WellLsid") &&
pd.getPropertyDescriptor().getPropertyType() == PropertyType.STRING)
{
wellLsidPropFinder = pd;
}
else if (isPlateMetadataEnabled &&
pd.getName().equalsIgnoreCase("Plate") &&
pd.getPropertyDescriptor().isLookup())
{
platePropFinder = pd;
}

else
{
var sampleLookup = AssaySampleLookupContext.checkSampleLookup(container, user, pd);
@@ -794,6 +821,11 @@ else if (sampleLookup.isLookup())
DomainProperty visitPD = visitPropFinder;
DomainProperty datePD = datePropFinder;
DomainProperty targetStudyPD = targetStudyPropFinder;
DomainProperty platePD = platePropFinder;
DomainProperty wellLocationPD = wellLocationPropFinder;
DomainProperty wellLsidPD = wellLsidPropFinder;

boolean resolvePlateSamples = isPlateMetadataEnabled && platePD != null && wellLocationPD != null && wellLsidPD != null;

return DataIteratorUtil.mapTransformer(rawData, inputCols ->
{
@@ -1042,6 +1074,34 @@ else if (validatorMap.containsKey(pd))
}
}

// Wire up well samples as material inputs
if (resolvePlateSamples)
{
Long plateId = IntegerUtils.asLong(map.get(platePD.getName()));
String wellLocation = (String) map.get(wellLocationPD.getName());
Long sampleId = null;
ExpMaterial material = null;

if (plateId != null && wellLocation != null)
{
Map<String, Long> wellSampleCache = plateWellCache.computeIfAbsent(plateId, (id) -> AssayPlateMetadataService.get().getWellLocationToSampleIdMap(container, user, plateId));
sampleId = wellSampleCache.get(wellLocation);
}

if (sampleId != null)
{
material = materialCache.computeIfAbsent(sampleId, (id) -> exp.getExpMaterial(id, containerFilter));
}

if (material != null)
{
// Note: we have to use the wellLsidPD as the Property Input Lineage Role because we resolve
// the material inputs for a plate assay based on WellLsid during delete.
rowBasedInputMaterials.putIfAbsent(material, AssayService.get().getPropertyInputLineageRole(wellLsidPD));
rowInputLSIDs.add(material.getLSID());
}
}

if (!errors.isEmpty())
throw new RuntimeValidationException(new ValidationException(errors, rowNum));

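The plate block added to checkData() above resolves each row's sample through two lazily filled caches: a well-location-to-sample-id map per plate and a resolved material per sample id, both reused across result rows. Below is a minimal, dependency-free sketch of that pattern for review purposes; the PlateWellResolutionSketch class and its wellMapLoader/materialLoader parameters are hypothetical stand-ins for AssayPlateMetadataService.getWellLocationToSampleIdMap() and the ExperimentService material lookup, not code from this change.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

class PlateWellResolutionSketch
{
    // Per-plate cache of well location -> sample row id, filled lazily and reused across result rows.
    private final Map<Long, Map<String, Long>> plateWellCache = new HashMap<>();

    // Per-sample cache of the resolved material, represented here by its LSID string.
    private final Map<Long, String> materialCache = new HashMap<>();

    // Returns the material LSID for the sample in the given plate well, or null when it cannot be resolved.
    String resolveWellSample(Long plateId, String wellLocation,
                             Function<Long, Map<String, Long>> wellMapLoader,
                             Function<Long, String> materialLoader)
    {
        if (plateId == null || wellLocation == null)
            return null;

        Long sampleId = plateWellCache.computeIfAbsent(plateId, wellMapLoader).get(wellLocation);
        if (sampleId == null)
            return null;

        return materialCache.computeIfAbsent(sampleId, materialLoader);
    }

    public static void main(String[] args)
    {
        PlateWellResolutionSketch sketch = new PlateWellResolutionSketch();

        // Fake loaders standing in for the service and database lookups.
        Function<Long, Map<String, Long>> wellMap = plateId -> Map.of("A1", 101L, "A2", 102L);
        Function<Long, String> material = sampleId -> "urn:lsid:example:Sample." + sampleId;

        System.out.println(sketch.resolveWellSample(1L, "A1", wellMap, material)); // urn:lsid:example:Sample.101
        System.out.println(sketch.resolveWellSample(1L, "B1", wellMap, material)); // null (no sample in that well)
    }
}
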
@@ -4,6 +4,7 @@
import org.jetbrains.annotations.Nullable;
import org.labkey.api.assay.AssayProvider;
import org.labkey.api.assay.AssayRunUploadContext;
import org.labkey.api.data.ColumnInfo;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
import org.labkey.api.data.TableInfo;
@@ -183,6 +184,13 @@ void applyHitSelectionCriteria(
List<Long> runIds
) throws ValidationException;

/**
* Returns a Map of Well Location to Sample RowID for a given Plate ID.
*/
Map<String, Long> getWellLocationToSampleIdMap(Container container, User user, Long plateId);

boolean isWellLookup(ColumnInfo col);

/**
* Should only be used to get a local instance of a plate schema where a contextual role might be involved. Schemas created this way are not cached,
* and all other usages should retrieve schemas from the QueryService.
46 changes: 44 additions & 2 deletions api/src/org/labkey/api/assay/sample/AssaySampleLookupContext.java
@@ -2,15 +2,18 @@

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.assay.AssayProtocolSchema;
import org.labkey.api.assay.AssayProvider;
import org.labkey.api.assay.AssayService;
import org.labkey.api.assay.plate.AssayPlateMetadataService;
import org.labkey.api.data.ColumnInfo;
import org.labkey.api.data.CompareType;
import org.labkey.api.data.Container;
import org.labkey.api.data.SQLFragment;
import org.labkey.api.data.SimpleFilter;
import org.labkey.api.data.SqlSelector;
import org.labkey.api.data.TableInfo;
import org.labkey.api.exp.api.ExpProtocol;
import org.labkey.api.exp.api.ExpProtocolApplication;
import org.labkey.api.exp.api.ExpRun;
import org.labkey.api.exp.api.ExpSampleType;
@@ -93,6 +96,33 @@ public void trackSampleLookupChange(Container container, User user, TableInfo ta
}
}

/**
* Keeps track of experiment runs for assays that have columns with a sample lookup, including plate
* metadata-enabled assays. Useful in data updates of assay run/result domains where sample lookups are subsequently
* reflected as material inputs to an experiment run.
* @param container Container from which to resolve sample lookup information.
* @param user User to utilize to resolve sample lookup information.
* @param table The table to utilize to resolve sample lookup information.
* @param schema The AssayProtocolSchema associated with the run.
* @param run The experiment run to track.
*/
public void trackSampleLookupChange(Container container, User user, TableInfo table, AssayProtocolSchema schema, ExpRun run)
{
if (table.getDomain() == null) return;

for (DomainProperty dp : table.getDomain().getNonBaseProperties())
trackSampleLookupChange(container, user, table, table.getColumn(dp.getName()), run);

if (_runIds.contains(run.getRowId()))
return;

AssayProvider provider = schema.getProvider();
ExpProtocol protocol = schema.getProtocol();

if (provider.isPlateMetadataEnabled(protocol))
_runIds.add(run.getRowId());
}

/**
* Check if a domain property is considered a valid sample lookup.
* @param container Container from which to resolve sample lookup information.
@@ -112,8 +142,9 @@ private static SampleLookup checkSampleLookup(Container container, User user, @N

ExpSampleType sampleType = ExperimentService.get().getLookupSampleType(dp, container, user);
boolean isSampleLookup = sampleType != null || ExperimentService.get().isLookupToMaterials(dp);
boolean isWellLookup = dp.getName().equalsIgnoreCase("WellLsid") && col != null && AssayPlateMetadataService.get().isWellLookup(col);

return new SampleLookup(isSampleLookup, col, dp, sampleType);
return new SampleLookup(isSampleLookup || isWellLookup, col, dp, sampleType);
}

private SampleLookup checkSampleLookup(Container container, User user, TableInfo table, ColumnInfo col)
@@ -384,7 +415,18 @@ private SQLFragment getLookupColumnSql(TableInfoKey tableInfoKey, SampleLookup s
var role = AssayService.get().getPropertyInputLineageRole(sampleLookup.domainProperty);
var sql = new SQLFragment();

if (column.getJdbcType().isInteger())
if (AssayPlateMetadataService.get().isWellLookup(column))
{
// tableSql selects all the Well LSIDs for the given runId; we then select all the SampleIds from the
// assay.well table that match those Well LSIDs.
tableSql = QueryService.get().getSelectBuilder(table)
.columns(Set.of(column))
.filter(tableFilter)
.buildSqlFragment();
sql.append("SELECT WS.SampleId as MaterialRowId, ").appendValue(role).append(" AS MaterialInputRole\n");
sql.append("FROM assay.well WS WHERE LSID IN (").append(tableSql).append(")");
}
else if (column.getJdbcType().isInteger())
{
sql.append("SELECT DA.").appendIdentifier(column.getAlias()).append(" AS MaterialRowId");
sql.append(", ?").add(role).append(" AS MaterialInputRole\n");
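
The well-lookup branch added to getLookupColumnSql() above re-derives material inputs by joining the run's WellLsid values against assay.well. Below is an illustrative sketch of the resulting SQL shape only; the inner results-table query, its aliases, and the run filter are assumptions, and the actual code assembles the statement through SQLFragment (appending the lineage role as a value) rather than concatenating strings.

public class WellLookupSqlShape
{
    public static void main(String[] args)
    {
        // Placeholder for the QueryService-built tableSql: selects the WellLsid result column for one run.
        String tableSql = "SELECT dr.WellLsid FROM /* assay results table */ dr WHERE dr.Run = ?";

        // Outer query built by the well-lookup branch: resolve well LSIDs to sample row ids,
        // tagging each with the property-input lineage role.
        String sql =
            "SELECT WS.SampleId AS MaterialRowId, '<lineage role>' AS MaterialInputRole\n" +
            "FROM assay.well WS WHERE LSID IN (" + tableSql + ")";

        System.out.println(sql);
    }
}
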
@@ -368,11 +368,7 @@ protected Map<String, Object> deleteRow(
}

// Issue 51126: need to track and resync run/sample lineage on delete in the same way we do for update
if (datatableInfo.getDomain() != null)
{
for (DomainProperty dp : datatableInfo.getDomain().getNonBaseProperties())
_assaySampleLookupContext.trackSampleLookupChange(container, user, datatableInfo, datatableInfo.getColumn(dp.getName()), run);
}
_assaySampleLookupContext.trackSampleLookupChange(container, user, datatableInfo, _schema, run);

return result;
}
8 changes: 4 additions & 4 deletions assay/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion assay/package.json
@@ -12,7 +12,7 @@
"clean": "rimraf resources/web/assay/gen && rimraf resources/views/gen && rimraf resources/web/gen"
},
"dependencies": {
"@labkey/components": "6.63.1"
"@labkey/components": "6.64.0"
},
"devDependencies": {
"@labkey/build": "8.6.0",
@@ -96,6 +96,7 @@
import org.labkey.api.util.logging.LogHelper;
import org.labkey.api.view.ActionURL;
import org.labkey.assay.TSVProtocolSchema;
import org.labkey.assay.plate.data.WellData;
import org.labkey.assay.plate.model.WellBean;
import org.labkey.assay.plate.query.PlateSchema;
import org.labkey.assay.plate.query.PlateTable;
@@ -1517,6 +1518,32 @@ public String format(FieldKey fieldKey)
return new PlateSchema(querySchema, contextualRoles);
}

@Override
public Map<String, Long> getWellLocationToSampleIdMap(Container container, User user, Long plateId)
{
Map<String, Long> wellLocationToSampleIdMap = new HashMap<>();
List<WellData> wellData = PlateManager.get().getWellData(container, user, plateId, true, false);

for (WellData data : wellData)
wellLocationToSampleIdMap.put(data.getPosition(), data.getSampleId());

return wellLocationToSampleIdMap;
}

@Override
public boolean isWellLookup(ColumnInfo col)
{
if (col == null) return false;

if (!col.isLookup()) return false;

var wellTable = AssayDbSchema.getInstance().getTableInfoWell();
var lookupTable = col.getFkTableInfo();

return lookupTable.getSchema().getName().equalsIgnoreCase(wellTable.getSchema().getName())
&& lookupTable.getName().equalsIgnoreCase(wellTable.getName());
}

private static class PlateMetadataImportHelper extends SimpleAssayDataImportHelper
{
private final Map<Long, Map<Position, Lsid>> _wellPositionMap; // map of plate position to well table
8 changes: 4 additions & 4 deletions core/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion core/package.json
@@ -53,7 +53,7 @@
}
},
"dependencies": {
"@labkey/components": "6.63.1",
"@labkey/components": "6.64.0",
"@labkey/themes": "1.4.2"
},
"devDependencies": {
8 changes: 4 additions & 4 deletions experiment/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion experiment/package.json
@@ -13,7 +13,7 @@
"test-integration": "cross-env NODE_ENV=test jest --ci --runInBand -c test/js/jest.config.integration.js"
},
"dependencies": {
"@labkey/components": "6.63.1"
"@labkey/components": "6.64.0"
},
"devDependencies": {
"@labkey/build": "8.6.0",