@@ -44,6 +44,7 @@ abstract public class AbstractCommandWrapper implements CommandWrapper
private File _outputDir = null;
private File _workingDir = null;
private Logger _log;
private boolean _logPath = false;
private Level _logLevel = Level.DEBUG;
private boolean _warnNonZeroExits = true;
private boolean _throwNonZeroExits = true;
@@ -205,9 +206,11 @@ private void setPath(ProcessBuilder pb)
{
String path = System.getenv("PATH");

getLogger().debug("Existing PATH: " + path);
getLogger().debug("toolDir: " + toolDir);

if (_logPath)
{
getLogger().debug("Existing PATH: " + path);
getLogger().debug("toolDir: " + toolDir);
}

if (path == null)
{
@@ -229,11 +232,19 @@ private void setPath(ProcessBuilder pb)
path = fileExe.getParent() + File.pathSeparatorChar + path;
}

getLogger().debug("using path: " + path);
if (_logPath)
{
getLogger().debug("using path: " + path);
}
pb.environment().put("PATH", path);
}
}

public void setLogPath(boolean logPath)
{
_logPath = logPath;
}

public void setOutputDir(File outputDir)
{
_outputDir = outputDir;
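The new flag defaults to false, so the PATH debug output above is opt-in. A minimal sketch of how a caller might enable it, assuming SimpleScriptWrapper (used elsewhere in this PR) extends AbstractCommandWrapper and therefore inherits the new setter; the helper class below is hypothetical and not part of the change:

import org.apache.logging.log4j.Logger;
import org.labkey.api.pipeline.PipelineJobException;
import org.labkey.api.sequenceanalysis.run.SimpleScriptWrapper;

import java.util.List;

// Hypothetical helper: run a command with the new PATH logging enabled for troubleshooting.
public class PathLoggingExample
{
    public static void runWithPathLogging(Logger log, List<String> args) throws PipelineJobException
    {
        SimpleScriptWrapper wrapper = new SimpleScriptWrapper(log);
        wrapper.setLogPath(true);  // emits the "Existing PATH" / "using path" debug lines in setPath()
        wrapper.execute(args);
    }
}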
14 changes: 14 additions & 0 deletions SequenceAnalysis/pipeline_code/extra_tools_install.sh
@@ -319,3 +319,17 @@ then
else
echo "Already installed"
fi

if [[ ! -e ${LKTOOLS_DIR}/sawfish || ! -z $FORCE_REINSTALL ]];
then
echo "Cleaning up previous installs"
rm -Rf $LKTOOLS_DIR/sawfish*

wget https://github.com/PacificBiosciences/sawfish/releases/download/v2.0.0/sawfish-v2.0.0-x86_64-unknown-linux-gnu.tar.gz
tar -xzf sawfish-v2.0.0-x86_64-unknown-linux-gnu.tar.gz

mv sawfish-v2.0.0-x86_64-unknown-linux-gnu $LKTOOLS_DIR/
ln -s $LKTOOLS_DIR/sawfish-v2.0.0-x86_64-unknown-linux-gnu/bin/sawfish $LKTOOLS_DIR/
else
echo "Already installed"
fi
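The symlink above is what lets the pipeline resolve the tool by name. A sketch of the Java-side lookup, mirroring getExe() in the new SawfishAnalysis class below; how SequencePipelineService maps "SAWFISHPATH"/"sawfish" to a path (env-var override vs. the tools directory) is the service's existing behavior and an assumption here:

import java.io.File;

import org.labkey.api.sequenceanalysis.pipeline.SequencePipelineService;

// Illustrative only: mirrors SawfishAnalysis.getExe(). The resolution order (a SAWFISHPATH
// override vs. the LKTOOLS_DIR symlink created above) is assumed, not changed by this PR.
public class SawfishExeLookup
{
    public static File locateSawfish()
    {
        return SequencePipelineService.get().getExeForPackage("SAWFISHPATH", "sawfish");
    }
}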
@@ -123,6 +123,8 @@
import org.labkey.sequenceanalysis.run.analysis.PbsvAnalysis;
import org.labkey.sequenceanalysis.run.analysis.PbsvJointCallingHandler;
import org.labkey.sequenceanalysis.run.analysis.PindelAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SawfishAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SawfishJointCallingHandler;
import org.labkey.sequenceanalysis.run.analysis.SequenceBasedTypingAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SnpCountAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SubreadAnalysis;
@@ -342,6 +344,7 @@ public static void registerPipelineSteps()
SequencePipelineService.get().registerPipelineStep(new PindelAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new PbsvAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new GenrichStep.Provider());
SequencePipelineService.get().registerPipelineStep(new SawfishAnalysis.Provider());

SequencePipelineService.get().registerPipelineStep(new PARalyzerAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new RnaSeQCStep.Provider());
@@ -400,6 +403,7 @@ public static void registerPipelineSteps()
SequenceAnalysisService.get().registerFileHandler(new NextCladeHandler());
SequenceAnalysisService.get().registerFileHandler(new ConvertToCramHandler());
SequenceAnalysisService.get().registerFileHandler(new PbsvJointCallingHandler());
SequenceAnalysisService.get().registerFileHandler(new SawfishJointCallingHandler());
SequenceAnalysisService.get().registerFileHandler(new DeepVariantHandler());
SequenceAnalysisService.get().registerFileHandler(new GLNexusHandler());
SequenceAnalysisService.get().registerFileHandler(new ParagraphStep());
@@ -0,0 +1,163 @@
package org.labkey.sequenceanalysis.run.analysis;

import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.pipeline.PipelineJobException;
import org.labkey.api.sequenceanalysis.model.AnalysisModel;
import org.labkey.api.sequenceanalysis.model.Readset;
import org.labkey.api.sequenceanalysis.pipeline.AbstractAnalysisStepProvider;
import org.labkey.api.sequenceanalysis.pipeline.AbstractPipelineStep;
import org.labkey.api.sequenceanalysis.pipeline.AnalysisOutputImpl;
import org.labkey.api.sequenceanalysis.pipeline.AnalysisStep;
import org.labkey.api.sequenceanalysis.pipeline.PipelineContext;
import org.labkey.api.sequenceanalysis.pipeline.PipelineStepProvider;
import org.labkey.api.sequenceanalysis.pipeline.ReferenceGenome;
import org.labkey.api.sequenceanalysis.pipeline.SamtoolsIndexer;
import org.labkey.api.sequenceanalysis.pipeline.SamtoolsRunner;
import org.labkey.api.sequenceanalysis.pipeline.SequencePipelineService;
import org.labkey.api.sequenceanalysis.run.SimpleScriptWrapper;
import org.labkey.sequenceanalysis.util.SequenceUtil;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class SawfishAnalysis extends AbstractPipelineStep implements AnalysisStep
{
public SawfishAnalysis(PipelineStepProvider<?> provider, PipelineContext ctx)
{
super(provider, ctx);
}

public static class Provider extends AbstractAnalysisStepProvider<SawfishAnalysis>
{
public Provider()
{
super("sawfish", "Sawfish Analysis", null, "This will run sawfish SV dicvoery and calling on the selected BAMs", List.of(), null, null);
}


@Override
public SawfishAnalysis create(PipelineContext ctx)
{
return new SawfishAnalysis(this, ctx);
}
}

@Override
public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, ReferenceGenome referenceGenome, File outputDir) throws PipelineJobException
{
AnalysisOutputImpl output = new AnalysisOutputImpl();

File inputFile = inputBam;
if (SequenceUtil.FILETYPE.cram.getFileType().isType(inputFile))
{
CramToBam samtoolsRunner = new CramToBam(getPipelineCtx().getLogger());
File bam = new File(getPipelineCtx().getWorkingDirectory(), inputFile.getName().replaceAll("\\.cram$", ".bam"));
File bamIdx = new File(bam.getPath() + ".bai");
if (!bamIdx.exists())
{
samtoolsRunner.convert(inputFile, bam, referenceGenome.getWorkingFastaFile(), SequencePipelineService.get().getMaxThreads(getPipelineCtx().getLogger()));
new SamtoolsIndexer(getPipelineCtx().getLogger()).execute(bam);
}
else
{
getPipelineCtx().getLogger().debug("BAM index exists, will not re-convert CRAM");
}

inputFile = bam;

output.addIntermediateFile(bam);
output.addIntermediateFile(bamIdx);
}

List<String> args = new ArrayList<>();
args.add(getExe().getPath());
args.add("discover");

args.add("--bam");
args.add(inputFile.getPath());

args.add("--ref");
args.add(referenceGenome.getWorkingFastaFile().getPath());

File svOutDir = new File(outputDir, "sawfish");
args.add("--output-dir");
args.add(svOutDir.getPath());

Integer maxThreads = SequencePipelineService.get().getMaxThreads(getPipelineCtx().getLogger());
if (maxThreads != null)
{
args.add("--threads");
args.add(String.valueOf(maxThreads));
}

File bcf = new File(svOutDir, "candidate.sv.bcf");
File bcfIdx = new File(bcf.getPath() + ".csi");
if (bcfIdx.exists())
{
getPipelineCtx().getLogger().debug("BCF index already exists, reusing output");
}
else
{
new SimpleScriptWrapper(getPipelineCtx().getLogger()).execute(args);
}

if (!bcf.exists())
{
throw new PipelineJobException("Unable to find file: " + bcf.getPath());
}

output.addSequenceOutput(bcf, rs.getName() + ": sawfish", "Sawfish SV Discovery", rs.getReadsetId(), null, referenceGenome.getGenomeId(), null);

return output;
}

@Override
public Output performAnalysisPerSampleLocal(AnalysisModel model, File inputBam, File referenceFasta, File outDir) throws PipelineJobException
{
return null;
}

private File getExe()
{
return SequencePipelineService.get().getExeForPackage("SAWFISHPATH", "sawfish");
}

private static class CramToBam extends SamtoolsRunner
{
public CramToBam(Logger log)
{
super(log);
}

public void convert(File inputCram, File outputBam, File fasta, @Nullable Integer threads) throws PipelineJobException
{
getLogger().info("Converting CRAM to BAM");

execute(getParams(inputCram, outputBam, fasta, threads));
}

private List<String> getParams(File inputCram, File outputBam, File fasta, @Nullable Integer threads)
{
List<String> params = new ArrayList<>();
params.add(getSamtoolsPath().getPath());
params.add("view");
params.add("-b");
params.add("-T");
params.add(fasta.getPath());
params.add("-o");
params.add(outputBam.getPath());

if (threads != null)
{
params.add("-@");
params.add(String.valueOf(threads));
}

params.add(inputCram.getPath());

return params;
}
}
}
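To make the argument assembly in performAnalysisPerSampleRemote easier to read, here is a standalone sketch of the command shape the step ultimately runs; every path and the thread count are hypothetical placeholders:

import java.util.ArrayList;
import java.util.List;

// Standalone sketch, not part of the PR: reproduces the discover argument list built above
// so the resulting command line is visible at a glance.
public class SawfishDiscoverCommandSketch
{
    public static void main(String[] args)
    {
        List<String> cmd = new ArrayList<>();
        cmd.add("/usr/local/labkey/tools/sawfish"); // hypothetical path; resolved via getExeForPackage("SAWFISHPATH", "sawfish")
        cmd.add("discover");
        cmd.add("--bam");
        cmd.add("/work/sample1.bam");               // CRAM inputs are converted to BAM first (see CramToBam)
        cmd.add("--ref");
        cmd.add("/genomes/ref.fasta");
        cmd.add("--output-dir");
        cmd.add("/work/analysis/sawfish");          // candidate.sv.bcf is expected in this directory
        cmd.add("--threads");
        cmd.add("8");                               // only added when getMaxThreads() returns a value

        System.out.println(String.join(" ", cmd));
    }
}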