12 changes: 4 additions & 8 deletions api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
@@ -22,7 +22,6 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.json.JSONArray;
import org.labkey.api.action.ApiUsageException;
import org.labkey.api.assay.plate.AssayPlateMetadataService;
import org.labkey.api.assay.sample.AssaySampleLookupContext;
import org.labkey.api.collections.CaseInsensitiveHashMap;
@@ -598,14 +597,11 @@ protected void insertRowData(
OntologyManager.UpdateableTableImportHelper importHelper = new SimpleAssayDataImportHelper(data, protocol, provider);
if (provider.isPlateMetadataEnabled(protocol))
{
if (context.getReRunId() != null)
// check if we are merging the re-imported data
if (context != null && context.getReRunId() != null && context.getReImportOption() == MERGE_DATA)
{
// check if we are merging the re-imported data
if (context.getReImportOption() == MERGE_DATA)
{
DataIteratorBuilder mergedData = AssayPlateMetadataService.get().mergeReRunData(container, user, context, fileData, provider, protocol, data);
fileData = DataIteratorUtil.wrapMap(mergedData.getDataIterator(new DataIteratorContext()), false);
}
DataIteratorBuilder mergedData = AssayPlateMetadataService.get().mergeReRunData(container, user, context, fileData, provider, protocol, data);
fileData = DataIteratorUtil.wrapMap(mergedData.getDataIterator(new DataIteratorContext()), false);
}

importHelper = AssayPlateMetadataService.get().getImportHelper(container, user, run, data, protocol, provider, context);
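Reviewer note on the hunk above: the nested re-run checks collapse into a single guard that also tolerates a null upload context, so the merge path only runs when a context, a re-run id, and the MERGE_DATA option are all present. A minimal standalone sketch of that guard shape follows; MergeGuardSketch, Context, ReImportOption, and resolveData are illustrative stand-ins, not the LabKey types.

class MergeGuardSketch
{
    // Stand-ins for the real upload context and re-import option.
    enum ReImportOption { MERGE_DATA, REPLACE_DATA }

    record Context(Integer reRunId, ReImportOption reImportOption) {}

    static String resolveData(Context context, String fileData)
    {
        // One guard replaces the two nested if blocks; a null context simply skips the merge.
        if (context != null && context.reRunId() != null && context.reImportOption() == ReImportOption.MERGE_DATA)
        {
            // merge the previously imported rows into the incoming data
            fileData = fileData + " (merged with run " + context.reRunId() + ")";
        }
        return fileData;
    }

    public static void main(String[] args)
    {
        System.out.println(resolveData(null, "rows"));                                       // rows
        System.out.println(resolveData(new Context(42, ReImportOption.MERGE_DATA), "rows")); // rows (merged with run 42)
    }
}
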
29 changes: 10 additions & 19 deletions api/src/org/labkey/api/audit/AbstractAuditTypeProvider.java
@@ -222,20 +222,11 @@ private void ensureProperties(User user, Domain domain)
}
}

// #26311 We want to trigger a save if the scale has changed
// Issue 26311: We want to trigger a save if the scale has changed
// CONSIDER: check for other differences here as well.
private boolean differ(PropertyDescriptor pd, DomainProperty dp, Container c)
{
return dp.getScale() != pd.getScale()
|| !dp.getRangeURI().equals(pd.getRangeURI())
// || !dp.getLabel().equals(pd.getLabel())
// || dp.isRequired() != pd.isRequired()
// || dp.isHidden() != pd.isHidden()
// || dp.isMvEnabled() != pd.isMvEnabled()
// || dp.getDefaultValueTypeEnum() != pd.getDefaultValueTypeEnum()
;


return dp.getScale() != pd.getScale() || !dp.getRangeURI().equals(pd.getRangeURI());
}

private void copyTo(DomainProperty dp, PropertyDescriptor pd, Container c)
@@ -321,7 +312,7 @@ protected void appendValueMapColumns(AbstractTableInfo table, String eventName,
MutableColumnInfo oldCol = table.getMutableColumn(FieldKey.fromString(OLD_RECORD_PROP_NAME));
MutableColumnInfo newCol = table.getMutableColumn(FieldKey.fromString(NEW_RECORD_PROP_NAME));

if(oldCol != null)
if (oldCol != null)
{
var added = table.addColumn(new AliasedColumn(table, "OldValues", oldCol));
added.setDisplayColumnFactory(DataMapColumn::new);
Expand All @@ -330,7 +321,7 @@ protected void appendValueMapColumns(AbstractTableInfo table, String eventName,
oldCol.setHidden(true);
}

if(newCol != null)
if (newCol != null)
{
var added = table.addColumn(new AliasedColumn(table, "NewValues", newCol));
added.setDisplayColumnFactory(DataMapColumn::new);
@@ -390,16 +381,16 @@ public static String encodeForDataMap(Map<String, ?> properties)
entry.getKey().equals(ExperimentService.ALIASCOLUMNALIAS))
continue;
Object value = entry.getValue();
if (value instanceof Time)
if (value instanceof Time time)
{
String formatted = DateUtil.formatIsoLongTime((Time)value);
String formatted = DateUtil.formatIsoLongTime(time);
stringMap.put(entry.getKey(), formatted);
}
else if (value instanceof Date)
else if (value instanceof Date date)
{
// issue: 35002 - normalize Date values to avoid Timestamp/Date toString differences
// issue: 36472 - use iso format to show date-time values
String formatted = DateUtil.toISO((Date)value);
// Issue 35002 - normalize Date values to avoid Timestamp/Date toString differences
// Issue 36472 - use iso format to show date-time values
String formatted = DateUtil.toISO(date);
stringMap.put(entry.getKey(), formatted);
}
else
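Reviewer note: the Time/Date branches in encodeForDataMap now use pattern matching for instanceof (Java 16+), so the pattern variable replaces the explicit casts while keeping the Time-before-Date ordering (java.sql.Time is a Date subclass). A self-contained sketch of the same encoding flow; SimpleDateFormat and the class name are stand-ins for DateUtil.formatIsoLongTime and DateUtil.toISO.

import java.sql.Time;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;

class DataMapFormatSketch
{
    static Map<String, String> encode(Map<String, ?> properties)
    {
        Map<String, String> stringMap = new LinkedHashMap<>();
        for (Map.Entry<String, ?> entry : properties.entrySet())
        {
            Object value = entry.getValue();
            if (value instanceof Time time)
            {
                // the pattern variable 'time' replaces the old explicit (Time) cast
                stringMap.put(entry.getKey(), new SimpleDateFormat("HH:mm:ss").format(time));
            }
            else if (value instanceof Date date)
            {
                // java.util.Date and java.sql.Timestamp both land here and format identically
                stringMap.put(entry.getKey(), new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date));
            }
            else
            {
                stringMap.put(entry.getKey(), String.valueOf(value));
            }
        }
        return stringMap;
    }

    public static void main(String[] args)
    {
        System.out.println(encode(Map.of("start", new Time(0), "created", new Date(0))));
    }
}
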
12 changes: 6 additions & 6 deletions api/src/org/labkey/api/audit/AuditTypeEvent.java
@@ -30,8 +30,6 @@
/**
* Bean object to capture audit log entries. Will be used to populate the database tables via get/set methods that
* align with column names in the corresponding provisioned table.
* User: klum
* Date: 7/12/13
*/
public class AuditTypeEvent
{
@@ -53,7 +51,7 @@ public class AuditTypeEvent
private User _createdBy;
private Date _modified;
private User _modifiedBy;
private String userComment;
private String _userComment;
private Long _transactionId;

public AuditTypeEvent(@NotNull String eventType, @NotNull Container container, @Nullable String comment)
@@ -69,7 +67,9 @@ public AuditTypeEvent(@NotNull String eventType, @NotNull Container container, @
}

/** Important for reflection-based instantiation */
public AuditTypeEvent(){}
public AuditTypeEvent()
{
}

public long getRowId()
{
@@ -173,12 +173,12 @@ public void setModifiedBy(User modifiedBy)

public void setUserComment(String userComment)
{
this.userComment = userComment;
_userComment = userComment;
}

public String getUserComment()
{
return this.userComment;
return _userComment;
}

public Long getTransactionId()
3 changes: 0 additions & 3 deletions api/src/org/labkey/api/data/triggers/Trigger.java
@@ -32,9 +32,6 @@
import java.util.stream.Collectors;

/**
* User: kevink
* Date: 12/21/15
*
* Trigger scripts are invoked before insert/update/delete on many LabKey tables.
* The Trigger is created by a TriggerFactory added to AbstractTableInfo.
*/
8 changes: 4 additions & 4 deletions assay/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion assay/package.json
@@ -12,7 +12,7 @@
"clean": "rimraf resources/web/assay/gen && rimraf resources/views/gen && rimraf resources/web/gen"
},
"dependencies": {
"@labkey/components": "6.61.1"
"@labkey/components": "6.62.7"
},
"devDependencies": {
"@labkey/build": "8.6.0",
5 changes: 5 additions & 0 deletions assay/src/org/labkey/assay/AssayModule.java
@@ -34,6 +34,7 @@
import org.labkey.api.assay.plate.PlateService;
import org.labkey.api.assay.plate.PlateUtils;
import org.labkey.api.assay.plate.PositionImpl;
import org.labkey.api.audit.AuditLogService;
import org.labkey.api.cache.CacheManager;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
@@ -87,6 +88,8 @@
import org.labkey.assay.plate.PlateReplicateStatsDomainKind;
import org.labkey.assay.plate.PlateSetDocumentProvider;
import org.labkey.assay.plate.TsvPlateLayoutHandler;
import org.labkey.assay.plate.audit.PlateAuditProvider;
import org.labkey.assay.plate.audit.PlateSetAuditProvider;
import org.labkey.assay.plate.query.PlateSchema;
import org.labkey.assay.plate.query.PlateSchemaTest;
import org.labkey.assay.plate.query.PlateTypeTable;
@@ -203,6 +206,8 @@ protected void startupAfterSpringConfig(ModuleContext moduleContext)
return result;
});
PlateManager.get().registerLsidHandlers();
AuditLogService.get().registerAuditType(new PlateSetAuditProvider());
AuditLogService.get().registerAuditType(new PlateAuditProvider());
SearchService ss = SearchService.get();

// ASSAY_CATEGORY
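Reviewer note: the two new registerAuditType calls follow the usual register-on-startup pattern, handing each plate audit provider to the audit service once during startupAfterSpringConfig. A rough sketch of that pattern with stand-in types; this is not the real AuditLogService interface, and the event names are hypothetical.

import java.util.ArrayList;
import java.util.List;

class AuditRegistrySketch
{
    // Stand-in for the provider contract; the real one carries far more metadata.
    interface AuditTypeProvider { String getEventName(); }

    private static final List<AuditTypeProvider> REGISTERED = new ArrayList<>();

    static void registerAuditType(AuditTypeProvider provider)
    {
        // module startup runs once per server start, so each provider is added exactly once
        REGISTERED.add(provider);
    }

    public static void main(String[] args)
    {
        registerAuditType(() -> "PlateSetEvent"); // hypothetical event names
        registerAuditType(() -> "PlateEvent");
        REGISTERED.forEach(p -> System.out.println(p.getEventName()));
    }
}
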
7 changes: 3 additions & 4 deletions assay/src/org/labkey/assay/PlateController.java
@@ -15,7 +15,6 @@
*/
package org.labkey.assay;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONObject;
@@ -278,9 +277,9 @@ public void validateCommand(NameForm target, Errors errors)
@Override
public boolean handlePost(NameForm form, BindException errors) throws Exception
{
Plate template = PlateService.get().getPlate(getContainer(), form.getPlateId());
if (template != null && template.getRowId() != null)
PlateService.get().deletePlate(getContainer(), getUser(), template.getRowId());
Plate plate = PlateService.get().getPlate(getContainer(), form.getPlateId());
if (plate != null && plate.getRowId() != null)
PlateService.get().deletePlate(getContainer(), getUser(), plate.getRowId());
return true;
}

@@ -284,7 +284,7 @@ private PlateSet createPlateSet(PlateSetType plateSetType, @Nullable PlateSet pa
_plateSetsCreated++;
CPUTimer timer = new CPUTimer("Plate");
timer.start();
PlateSet result = PlateManager.get().createPlateSet(getContainer(), getUser(), plateSet, plates, parentPlateSet != null ? parentPlateSet.getRowId() : null);
PlateSet result = PlateManager.get().createPlateSet(getContainer(), getUser(), plateSet, plates, parentPlateSet != null ? parentPlateSet.getRowId() : null, null);
timer.stop();
_plateTimings.add(Double.valueOf((double) timer.getTotalMilliseconds() / plates.size()));

33 changes: 20 additions & 13 deletions assay/src/org/labkey/assay/plate/AssayPlateMetadataServiceImpl.java
@@ -164,7 +164,7 @@ public DataIteratorBuilder mergePlateMetadata(
@Override
public Map<String, Object> apply(Map<String, Object> row)
{
// ensure the result data includes a wellLocation field with values like : A1, F12, etc
// ensure the result data includes a wellLocation field with position value (e.g., A1, F12, etc.)
Object wellLocation = PropertyService.get().getDomainPropertyValueFromRow(wellLocationProperty, row);
if (wellLocation == null)
throw new RuntimeValidationException("Imported data must contain a WellLocation column to support plate metadata integration.");
@@ -179,11 +179,14 @@ public Map<String, Object> apply(Map<String, Object> row)
if (plateIdentifier == null)
throw new RuntimeValidationException("Unable to resolve plate identifier for results row (" + rowCounter + ").");

Plate plate = PlateService.get().getPlate(cf, plateSetId, plateIdentifier);
if (plate == null)
throw new RuntimeValidationException("Unable to resolve the plate \"" + plateIdentifier + "\" for the results row (" + rowCounter + ").");
plateIdentifierMap.computeIfAbsent(plateIdentifier, k -> {
Plate plate = PlateService.get().getPlate(cf, plateSetId, plateIdentifier);
if (plate == null)
throw new RuntimeValidationException("Unable to resolve the plate \"" + plateIdentifier + "\" for the results row (" + rowCounter + ").");

plateIdentifierMap.putIfAbsent(plateIdentifier, new Pair<>(plate, new HashMap<>()));
return Pair.of(plate, new HashMap<>());
});
Plate plate = plateIdentifierMap.get(plateIdentifier).first;

// if the plate identifier is the plate name, we need to make sure it resolves during importRows
// so replace it with the plateId (which will be unique)
@@ -205,12 +208,14 @@ public Map<String, Object> apply(Map<String, Object> row)
}

if (!wellSamples.isEmpty())
{
// stash away any samples associated with the plate
ExperimentService.get().getExpMaterials(wellSamples).forEach(s -> sampleMap.put(s.getRowId(), s));
}
}

PositionImpl well = new PositionImpl(null, String.valueOf(wellLocation));
// need to adjust the column value to be 0 based to match the template locations
// need to adjust the column value to be 0-based to match the template locations
well.setColumn(well.getColumn() - 1);

if (!positionToWell.containsKey(well))
@@ -386,7 +391,7 @@ public DataIteratorBuilder mergeReRunData(
throw new ExperimentException(String.format("Unable to query the assay results for protocol : %s", protocol.getName()));

// The plate identifier is either a row ID or plate ID on incoming data, need to match that when merging existing data.
FieldKey plateFieldKey = FieldKey.fromParts(AssayResultDomainKind.Column.Plate.name());
FieldKey plateFieldKey = AssayResultDomainKind.Column.Plate.fieldKey();
// Note that in the case where there is a transform script on the assay design, the LK data parsing might not have
// found any rows, and we might be deferring to the transform script to do that parsing. This block of code should
// be able to proceed in that case by just passing through all run results to the transform script for the run being replaced.
Expand Down Expand Up @@ -1507,7 +1512,7 @@ public String format(FieldKey fieldKey)
}

@Override
public UserSchema getPlateSchema(QuerySchema querySchema, Set<Role> contextualRoles)
public @NotNull UserSchema getPlateSchema(QuerySchema querySchema, Set<Role> contextualRoles)
{
return new PlateSchema(querySchema, contextualRoles);
}
Expand All @@ -1524,7 +1529,6 @@ private static class PlateMetadataImportHelper extends SimpleAssayDataImportHelp
private final ExpProtocol _protocol;
private final AssayProvider _provider;
private final AssayRunUploadContext<?> _context;
private DomainProperty _stateProp;

public PlateMetadataImportHelper(
ExpData data,
@@ -1556,7 +1560,7 @@ public void bindAdditionalParameters(Map<String, Object> map, ParameterMapStatem

Domain runDomain = _provider.getRunDomain(_protocol);
Domain resultDomain = _provider.getResultsDomain(_protocol);
_stateProp = AssayPlateMetadataServiceImpl.getAssayStateProp(resultDomain);
DomainProperty stateProp = AssayPlateMetadataServiceImpl.getAssayStateProp(resultDomain);
DomainProperty plateSetProperty = runDomain.getPropertyByName(AssayPlateMetadataService.PLATE_SET_COLUMN_NAME);
DomainProperty plateProperty = resultDomain.getPropertyByName(AssayResultDomainKind.Column.Plate.name());
DomainProperty wellLocationProperty = resultDomain.getPropertyByName(AssayResultDomainKind.Column.WellLocation.name());
@@ -1629,7 +1633,7 @@ public void bindAdditionalParameters(Map<String, Object> map, ParameterMapStatem
// Validate any data state values on the row. No hit selection / data state processing is done on import
// because at this time transform script hit selection is not supported nor is there any intersection
// in the re-import case yet.
validateRowDataStates(_container, map, _stateProp);
validateRowDataStates(_container, map, stateProp);
}

/**
@@ -1650,11 +1654,12 @@ public void afterBatchInsert(int rowCount)

AssayProtocolSchema schema = _provider.createProtocolSchema(_user, _container, _protocol, null);
TableInfo resultsTable = schema.createDataTable(null, false);
boolean isReimport = isExistingRun();

// Re-select any hits that were present in the previous run, this works in conjunction with the code in
// mergeReRunData where previous hits are removed for any data unchanged by the new incoming data. At this
// point any remaining hits should represent selections we plan to move forward to the new run
if (isExistingRun())
if (isReimport)
{
ExpRun prevRun = ExperimentService.get().getExpRun(_context.getReRunId());
if (prevRun != null)
@@ -1677,6 +1682,8 @@

AssayPlateMetadataService.get().applyHitSelectionCriteria(_container, _user, _protocol, resultsTable, List.of(_run.getRowId()));

PlateManager.get().addPlateImportAuditEvents(_container, _user, tx, _plateIdentifierMap.values().stream().toList(), _run, isReimport);

tx.commit();
}
catch (Throwable e)
@@ -1711,7 +1718,7 @@ public void testGridAnnotations() throws Exception
new PlateManager.PlateData(null, plateType.getRowId(), null, null, Collections.emptyList())
);

PlateSet plateSet = PlateManager.get().createPlateSet(container, user, new PlateSetImpl(), plates, null);
PlateSet plateSet = PlateManager.get().createPlateSet(container, user, new PlateSetImpl(), plates, null, null);
List<Plate> plateSetPlates = PlateManager.get().getPlatesForPlateSet(plateSet);
assertEquals("Expected two plates to be created.", 2, plateSetPlates.size());
Plate plate = plateSetPlates.get(0);
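Reviewer note: the per-row plate lookup in mergePlateMetadata now goes through computeIfAbsent, so each plate identifier is resolved once, reused for every later row, and still fails fast on the first unresolvable identifier. A standalone sketch of that memoization pattern; the Plate record and the resolver function are placeholders for PlateService.get().getPlate(...).

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

class PlateLookupCacheSketch
{
    record Plate(String identifier) {}

    private final Map<String, Plate> cache = new HashMap<>();
    private final Function<String, Plate> resolver;
    private int resolverCalls = 0;

    PlateLookupCacheSketch(Function<String, Plate> resolver)
    {
        this.resolver = resolver;
    }

    Plate get(String identifier)
    {
        return cache.computeIfAbsent(identifier, id -> {
            resolverCalls++;                 // the expensive resolution only runs on a cache miss
            Plate plate = resolver.apply(id);
            if (plate == null)               // throwing inside computeIfAbsent leaves no entry behind
                throw new IllegalStateException("Unable to resolve the plate \"" + id + "\"");
            return plate;
        });
    }

    public static void main(String[] args)
    {
        PlateLookupCacheSketch plates = new PlateLookupCacheSketch(Plate::new);
        plates.get("PLATE-1");
        plates.get("PLATE-1"); // served from the map; resolver not invoked again
        plates.get("PLATE-2");
        System.out.println("resolver calls: " + plates.resolverCalls); // 2
    }
}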