@@ -29,6 +29,7 @@
import org.labkey.api.data.Table;
import org.labkey.api.data.TableInfo;
import org.labkey.api.data.TableSelector;
+import org.labkey.api.data.WorkbookContainerType;
import org.labkey.api.exp.api.DataType;
import org.labkey.api.exp.api.ExpData;
import org.labkey.api.exp.api.ExperimentService;
@@ -490,7 +491,7 @@ public boolean accept(File dir, String name)
Assert.assertEquals("Incorrect number of outputs created", expectedOutputs.size(), files.size());
}

-protected Set<PipelineJob> createPipelineJob(String jobName, JSONObject config, SequenceAnalysisController.AnalyzeForm.TYPE type) throws Exception
+protected Set<PipelineJob> createPipelineJob(String jobName, JSONObject config, SequenceAnalysisController.AnalyzeForm.TYPE type, boolean createNewWorkbook) throws Exception
{
Map<String, Object> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
@@ -503,7 +504,13 @@ protected Set<PipelineJob> createPipelineJob(String jobName, JSONObject config,
json.put("type", type.name());
String requestContent = json.toString();

-HttpServletRequest request = ViewServlet.mockRequest(RequestMethod.POST.name(), DetailsURL.fromString("/sequenceanalysis/startPipelineJob.view").copy(_project).getActionURL(), _context.getUser(), headers, requestContent);
+Container pipelineJobContainer = _project;
+if (createNewWorkbook)
+{
+    pipelineJobContainer = ContainerManager.createContainer(_project, null, "Workbook: " + jobName, null, WorkbookContainerType.NAME, TestContext.get().getUser());
+}
+
+HttpServletRequest request = ViewServlet.mockRequest(RequestMethod.POST.name(), DetailsURL.fromString("/sequenceanalysis/startPipelineJob.view").copy(pipelineJobContainer).getActionURL(), _context.getUser(), headers, requestContent);

MockHttpServletResponse response = ViewServlet.mockDispatch(request, null);
JSONObject responseJson = new JSONObject(response.getContentAsString());
@@ -783,7 +790,7 @@ public void basicTest() throws Exception

appendSamplesForImport(config, List.of(g));

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobs);

Set<File> expectedOutputs = new HashSet<>();
@@ -828,7 +835,7 @@ public void leaveInPlaceTest() throws Exception
appendSamplesForImport(config, List.of(g));
config.put("inputFileTreatment", "leaveInPlace");

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, false);
waitForJobs(jobs);

Set<File> expectedOutputs = new HashSet<>();
@@ -906,7 +913,7 @@ private void runMergePipelineJob(String jobName, boolean deleteIntermediates, St
config.put("inputfile.runFastqc", true);
appendSamplesForImport(config, Arrays.asList(g, g2, g3));

-Set<PipelineJob> jobsUnsorted = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobsUnsorted = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobsUnsorted);

List<PipelineJob> jobs = new ArrayList<>(jobsUnsorted);
@@ -1117,7 +1124,7 @@ public void barcodeTest() throws Exception
String[] fileNames = new String[]{DUAL_BARCODE_FILENAME};

JSONObject config = getBarcodeConfig(jobName, fileNames, prefix);
-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobs);

File basedir = getBaseDir(jobs.iterator().next());
@@ -1166,7 +1173,7 @@ public void barcodeTestDeletingIntermediates() throws Exception
config.put("deleteIntermediateFiles", true);
config.put("inputFileTreatment", "compress");

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobs);

File basedir = getBaseDir(jobs.iterator().next());
@@ -1227,7 +1234,7 @@ public void pairedEndTest() throws Exception

appendSamplesForImport(config, List.of(g));

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobs);

Set<File> expectedOutputs = new HashSet<>();
@@ -1281,7 +1288,7 @@ public void pairedEndTestMovingInputs() throws Exception

appendSamplesForImport(config, List.of(g));

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, false);
waitForJobs(jobs);

Set<File> expectedOutputs = new HashSet<>();
@@ -1337,7 +1344,7 @@ public void pairedEndTestDeletingInputs() throws Exception

appendSamplesForImport(config, List.of(g));

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.readsetImport, true);
waitForJobs(jobs);

Set<File> expectedOutputs = new HashSet<>();
@@ -1834,7 +1841,7 @@ public void testMosaik() throws Exception

appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

validateInputs();
@@ -1882,7 +1889,7 @@ public void testMosaikWithBamPostProcessing() throws Exception

appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

List<String> extraFiles = new ArrayList<>();
@@ -1981,7 +1988,7 @@ public void testMosaikWithBamPostProcessingAndDelete() throws Exception

appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, false);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2040,7 +2047,7 @@ public void testMosaikDeletingIntermediates() throws Exception
config.put("deleteIntermediateFiles", true);
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2094,7 +2101,7 @@ public void testBWASW() throws Exception
config.put("alignment", "BWA-SW");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2158,7 +2165,7 @@ public void testBWAMem() throws Exception
config.put("alignment", "BWA-Mem");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2226,7 +2233,7 @@ public void testBWAWithAdapters() throws Exception

appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2327,7 +2334,7 @@ public void testBWA() throws Exception
config.put("alignment", "BWA");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2397,7 +2404,7 @@ public void testBowtie() throws Exception
config.put("alignment", "Bowtie");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2468,7 +2475,7 @@ public void testBowtieDeletingIntermediates() throws Exception
config.put("deleteIntermediateFiles", true);
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2538,7 +2545,7 @@ public void testBwaMemWithSavedLibrary() throws Exception
config.put("referenceLibraryCreation.SavedLibrary.libraryId", libraryId);
appendSamplesForAlignment(config, Collections.singletonList(_readsets.get(0)));

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

//we expect the index to get copied back to the reference library location
@@ -2619,7 +2626,7 @@ public void testBwaMemWithSavedLibrary2() throws Exception
config.put("referenceLibraryCreation.SavedLibrary.libraryId", libraryId);
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, false);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2697,7 +2704,7 @@ public void testMergedAlignments() throws Exception

appendSamplesForAlignment(config, models);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2749,7 +2756,7 @@ public void testBowtie2() throws Exception
config.put("alignment", "Bowtie2");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2849,7 +2856,7 @@ public void testStar() throws Exception
config.put("alignment", "STAR");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();
@@ -2939,7 +2946,7 @@ public void testBismarkWithSavedLibraryAndAdapters() throws Exception
config.put("fastqProcessing.AdapterTrimming.adapters", "[[\"Nextera Transposon Adapter A\",\"AGATGTGTATAAGAGACAG\",true,true]]");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

//we expect the index to get copied back to the reference library location
@@ -3091,7 +3098,7 @@ public void testBismarkWithSavedLibraryAdaptersAndDelete() throws Exception
config.put("fastqProcessing.AdapterTrimming.adapters", "[[\"Nextera Transposon Adapter A\",\"AGATGTGTATAAGAGACAG\",true,true]]");
appendSamplesForAlignment(config, _readsets);

-Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment);
+Set<PipelineJob> jobs = createPipelineJob(jobName, config, SequenceAnalysisController.AnalyzeForm.TYPE.alignment, true);
waitForJobs(jobs);

Set<String> extraFiles = new HashSet<>();