Skip to content
33 changes: 16 additions & 17 deletions api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@
import java.util.function.Function;

import static java.util.stream.Collectors.toList;
import static org.labkey.api.assay.AssayRunUploadContext.ReImportOption.MERGE_DATA;
import static org.labkey.api.exp.OntologyManager.NO_OP_ROW_CALLBACK;
import static org.labkey.api.gwt.client.ui.PropertyType.SAMPLE_CONCEPT_URI;

Expand Down Expand Up @@ -237,8 +238,9 @@ private DataIteratorBuilder parsePlateData(
Container container = context.getContainer();
User user = context.getUser();

Integer plateSetId = getPlateSetValueFromRunProps(context, provider, protocol);
DataIteratorBuilder dataRows = AssayPlateMetadataService.get().parsePlateData(container, user, ((AssayUploadXarContext)context).getContext(), data, provider,
AssayRunUploadContext<?> runUploadContext = ((AssayUploadXarContext)context).getContext();
Integer plateSetId = AssayPlateMetadataService.get().getPlateSetId(runUploadContext, provider, protocol);
DataIteratorBuilder dataRows = AssayPlateMetadataService.get().parsePlateData(container, user, runUploadContext, data, provider,
protocol, plateSetId, dataFile, settings);

// assays with plate metadata support will merge the plate metadata with the data rows to make it easier for
Expand All @@ -251,21 +253,6 @@ private DataIteratorBuilder parsePlateData(
return dataRows;
}

@Nullable
private Integer getPlateSetValueFromRunProps(XarContext context, AssayProvider provider, ExpProtocol protocol) throws ExperimentException
{
    // Resolve the plate set property declared on the assay's run domain; it is required
    // whenever plate metadata is enabled for the protocol.
    Domain runDomain = provider.getRunDomain(protocol);
    DomainProperty propertyPlateSet = runDomain.getPropertyByName(AssayPlateMetadataService.PLATE_SET_COLUMN_NAME);
    if (propertyPlateSet == null)
    {
        throw new ExperimentException("The assay run domain for the assay '" + protocol.getName() + "' does not contain a plate set property.");
    }

    // NOTE(review): assumes the XarContext is always an AssayUploadXarContext here, as the
    // original code did — the cast will throw ClassCastException otherwise.
    Map<DomainProperty, String> runProps = ((AssayUploadXarContext)context).getContext().getRunProperties();
    // getOrDefault(key, null) is equivalent to get(key); use the simpler form.
    Object plateSetVal = runProps.get(propertyPlateSet);
    if (plateSetVal == null)
        return null;

    try
    {
        return Integer.parseInt(String.valueOf(plateSetVal));
    }
    catch (NumberFormatException e)
    {
        // Run property values are user supplied; report a descriptive checked error
        // instead of letting an unchecked NumberFormatException escape.
        throw new ExperimentException("Invalid plate set value '" + plateSetVal + "' for assay '" + protocol.getName() + "'.", e);
    }
}

/**
* Creates a DataLoader that can handle missing value indicators if the columns on the domain
* are configured to support it.
Expand Down Expand Up @@ -610,7 +597,19 @@ protected void insertRowData(
{
OntologyManager.UpdateableTableImportHelper importHelper = new SimpleAssayDataImportHelper(data, protocol, provider);
if (provider.isPlateMetadataEnabled(protocol))
{
if (context.getReRunId() != null)
{
// check if we are merging the re-imported data
if (context.getReImportOption() == MERGE_DATA)
{
DataIteratorBuilder mergedData = AssayPlateMetadataService.get().mergeReRunData(container, user, context, fileData, provider, protocol, data);
fileData = DataIteratorUtil.wrapMap(mergedData.getDataIterator(new DataIteratorContext()), false);
}
}

importHelper = AssayPlateMetadataService.get().getImportHelper(container, user, run, data, protocol, provider, context);
}

if (tableInfo instanceof UpdateableTableInfo uti)
{
Expand Down
29 changes: 29 additions & 0 deletions api/src/org/labkey/api/assay/plate/AssayPlateMetadataService.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
import org.labkey.api.data.TableInfo;
import org.labkey.api.dataiterator.DataIterator;
import org.labkey.api.dataiterator.DataIteratorBuilder;
import org.labkey.api.exp.ExperimentException;
import org.labkey.api.exp.Lsid;
Expand Down Expand Up @@ -66,6 +67,34 @@ DataIteratorBuilder mergePlateMetadata(
ExpProtocol protocol
) throws ExperimentException;

/**
 * Takes the current incoming data and combines it with any data uploaded in the previous run (re-run ID). Data
 * can be combined for plates within a plate set, but only on a per plate boundary. If there is data for plates
 * in both sets of data, the most recent data will take precedence.
 *
 * @param container The container the run is being imported into
 * @param user The user performing the re-import
 * @param context The run upload context; its re-run ID identifies the previous run whose data is merged in
 * @param results The incoming data rows
 * @param provider The assay provider for the protocol
 * @param protocol The assay protocol being re-imported
 * @param data The experiment data object associated with the incoming rows
 * @return The new, combined data
 * @throws ExperimentException if the previous run's data cannot be read or merged
 */
DataIteratorBuilder mergeReRunData(
    Container container,
    User user,
    @NotNull AssayRunUploadContext<?> context,
    DataIterator results,
    AssayProvider provider,
    ExpProtocol protocol,
    ExpData data
) throws ExperimentException;

/**
 * Returns the plate set ID for the current run context.
 *
 * @param context The run upload context to read the plate set value from
 * @param provider The assay provider for the protocol
 * @param protocol The assay protocol whose run domain declares the plate set property
 * @return The plate set row ID, or null if no plate set value is present in the context
 * @throws ExperimentException if the plate set value cannot be resolved
 */
@Nullable
Integer getPlateSetId(
    AssayRunUploadContext<?> context,
    AssayProvider provider,
    ExpProtocol protocol
) throws ExperimentException;

/**
* Handles the validation and parsing of the plate data (or data file) including plate graphical formats as
* well as cases where plate identifiers have not been supplied.
Expand Down
43 changes: 38 additions & 5 deletions api/src/org/labkey/api/assay/transform/DataExchangeHandler.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
package org.labkey.api.assay.transform;

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.assay.AssayProvider;
import org.labkey.api.assay.AssayRunUploadContext;
import org.labkey.api.data.TSVWriter;
Expand All @@ -37,15 +38,47 @@

/**
* Used to process input and output data between the server and externally executed qc and analysis scripts.
* User: Karl Lum
* Date: Jan 7, 2009
*/
public interface DataExchangeHandler
{
Pair<FileLike, Set<FileLike>> createTransformationRunInfo(AssayRunUploadContext<? extends AssayProvider> context, ExpRun run, FileLike scriptDir, Map<DomainProperty, String> runProperties, Map<DomainProperty, String> batchProperties) throws Exception;
void createSampleData(@NotNull ExpProtocol protocol, ViewContext viewContext, FileLike scriptDir) throws Exception;
/**
 * Create and serialize the run properties information that is made available to transform scripts.
 * The file contains a variety of information based on the transform operation being specified.
 *
 * @param operation The transform operation being performed
 * @param context Contains information about the import or update context
 * @param run The experiment run being transformed; may be null when no run exists yet for the operation
 * @param scriptDir The folder that the transform script will be run in.
 * @param runProperties The run domain property values to serialize for the script
 * @param batchProperties The batch domain property values to serialize for the script
 * @return The map of the run properties file to the set of other data files associated with the operation
 *         being performed.
 * @throws Exception if the run info files cannot be created or written
 */
Pair<FileLike, Set<FileLike>> createTransformationRunInfo(
    DataTransformService.TransformOperation operation,
    AssayRunUploadContext<? extends AssayProvider> context,
    @Nullable ExpRun run,
    FileLike scriptDir,
    Map<DomainProperty, String> runProperties,
    Map<DomainProperty, String> batchProperties
) throws Exception;

TransformResult processTransformationOutput(AssayRunUploadContext<? extends AssayProvider> context, FileLike runInfo, ExpRun run, FileLike scriptFile, TransformResult mergeResult, Set<FileLike> inputDataFiles) throws ValidationException;
/**
 * Creates a test version of the run properties file for download
 *
 * @param operation The transform operation to generate sample data for
 * @param protocol The assay protocol the sample data is based on
 * @param viewContext The current view context of the requesting user
 * @param scriptDir The folder the sample files are written to
 * @throws Exception if the sample files cannot be created or written
 */
void createSampleData(
    DataTransformService.TransformOperation operation,
    @NotNull ExpProtocol protocol,
    ViewContext viewContext,
    FileLike scriptDir
) throws Exception;

/**
 * Processes the output produced by a transform script run and folds it into the transform result.
 *
 * @param operation The transform operation that was performed
 * @param context Contains information about the import or update context
 * @param runInfo The run properties file that was passed to the script
 * @param run The experiment run being transformed; may be null when no run exists yet for the operation
 * @param scriptFile The (possibly rewritten) script file that was executed
 * @param mergeResult The result from any previous transform step, to merge this step's output into
 * @param inputDataFiles The set of data files that were inputs to the script
 * @return The combined transform result
 * @throws ValidationException if the script output is invalid
 */
TransformResult processTransformationOutput(
    DataTransformService.TransformOperation operation,
    AssayRunUploadContext<? extends AssayProvider> context,
    FileLike runInfo,
    @Nullable ExpRun run,
    FileLike scriptFile,
    TransformResult mergeResult,
    Set<FileLike> inputDataFiles
) throws ValidationException;

DataSerializer getDataSerializer();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ public static DataTransformService get()
public static final String BASE_SERVER_URL_REPLACEMENT = "baseServerURL";
public static final String CONTAINER_PATH = "containerPath";
public static final String ORIGINAL_SOURCE_PATH = "OriginalSourcePath";
public static final String TRANSFORM_OPERATION = "transformOperation";

public enum TransformOperation
{
Expand Down Expand Up @@ -134,7 +135,7 @@ public TransformResult transformAndValidate(

Bindings bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
String script = sb.toString();
Pair<FileLike, Set<FileLike>> files = dataHandler.createTransformationRunInfo(context, run, scriptDir, runProperties, batchProperties);
Pair<FileLike, Set<FileLike>> files = dataHandler.createTransformationRunInfo(operation, context, run, scriptDir, runProperties, batchProperties);
FileLike runInfo = files.getKey();

bindings.put(ExternalScriptEngine.WORKING_DIRECTORY, scriptDir.toNioPathForWrite().toString());
Expand All @@ -144,14 +145,15 @@ public TransformResult transformAndValidate(

// Issue 51543: Resolve windows path to run properties
paramMap.put(RUN_INFO_REPLACEMENT, runInfo.toNioPathForWrite().toFile().getAbsolutePath().replaceAll("\\\\", "/"));
paramMap.put(TRANSFORM_OPERATION, operation.name());

addStandardParameters(context.getRequest(), context.getContainer(), scriptFile, session.getApiKey(), paramMap);

bindings.put(ExternalScriptEngine.PARAM_REPLACEMENT_MAP, paramMap);

Object output = engine.eval(script);

FileLike rewrittenScriptFile = null;
FileLike rewrittenScriptFile;
if (bindings.get(ExternalScriptEngine.REWRITTEN_SCRIPT_FILE) instanceof File)
{
var rewrittenScriptFileObject = bindings.get(ExternalScriptEngine.REWRITTEN_SCRIPT_FILE);
Expand All @@ -166,7 +168,7 @@ public TransformResult transformAndValidate(
}

// process any output from the transformation script
result = dataHandler.processTransformationOutput(context, runInfo, run, rewrittenScriptFile, result, files.getValue());
result = dataHandler.processTransformationOutput(operation, context, runInfo, run, rewrittenScriptFile, result, files.getValue());

// Propagate any transformed batch properties on to the next script
if (result.getBatchProperties() != null && !result.getBatchProperties().isEmpty())
Expand Down
Loading