Skip to content

Commit bd45f82

Browse files
go all in on IntHashMap and LongHashMap
1 parent 40b391c commit bd45f82

20 files changed

+72
-57
lines changed

SequenceAnalysis/src/org/labkey/sequenceanalysis/ReadDataImpl.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
package org.labkey.sequenceanalysis;
22

3+
import org.labkey.api.collections.IntHashMap;
34
import org.labkey.api.data.CompareType;
45
import org.labkey.api.data.SimpleFilter;
56
import org.labkey.api.data.Sort;
@@ -14,7 +15,6 @@
1415

1516
import java.io.File;
1617
import java.util.Date;
17-
import java.util.HashMap;
1818
import java.util.List;
1919
import java.util.Map;
2020

@@ -40,7 +40,7 @@ public class ReadDataImpl implements ReadData
4040
private boolean _archived = false;
4141
private String sra_accession;
4242

43-
private final Map<Integer, File> _cachedFiles = new HashMap<>();
43+
private final Map<Integer, File> _cachedFiles = new IntHashMap<>();
4444

4545
public ReadDataImpl()
4646
{

SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisController.java

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,8 @@
5353
import org.labkey.api.assay.AssayFileWriter;
5454
import org.labkey.api.collections.CaseInsensitiveHashMap;
5555
import org.labkey.api.collections.CaseInsensitiveHashSet;
56+
import org.labkey.api.collections.IntHashMap;
57+
import org.labkey.api.collections.IntHashSet;
5658
import org.labkey.api.data.ColumnInfo;
5759
import org.labkey.api.data.CompareType;
5860
import org.labkey.api.data.Container;
@@ -596,11 +598,11 @@ public ModelAndView getConfirmView(DeleteForm form, BindException errors) throws
596598
keys.add(ConvertHelper.convert(key, Integer.class));
597599
}
598600

599-
Set<Integer> expRunsToDelete = new HashSet<>();
600-
Set<Integer> readsetIds = new HashSet<>();
601-
Set<Integer> readDataIds = new HashSet<>();
602-
Set<Integer> analysisIds = new HashSet<>();
603-
Set<Integer> outputFileIds = new HashSet<>();
601+
Set<Integer> expRunsToDelete = new IntHashSet();
602+
Set<Integer> readsetIds = new IntHashSet();
603+
Set<Integer> readDataIds = new IntHashSet();
604+
Set<Integer> analysisIds = new IntHashSet();
605+
Set<Integer> outputFileIds = new IntHashSet();
604606

605607
StringBuilder msg = new StringBuilder("Are you sure you want to delete the following " + keys.size() + " ");
606608
if (SequenceAnalysisSchema.TABLE_ANALYSES.equals(_table.getName()))
@@ -753,7 +755,7 @@ public ModelAndView handleRequest(HttpServletRequest request, HttpServletRespons
753755

754756
private void getAdditionalRuns(Set<Integer> readsetIds, Set<Integer> readDataIds, Set<Integer> analysisIds, Set<Integer> outputFileIds, Set<Integer> expRunsToDelete)
755757
{
756-
Set<Integer> runIdsStillInUse = new HashSet<>();
758+
Set<Integer> runIdsStillInUse = new IntHashSet();
757759

758760
//work backwards, adding additional pipeline jobs that will become orphans:
759761
runIdsStillInUse.addAll(getRunIdsInUse(SequenceAnalysisSchema.TABLE_READSETS, expRunsToDelete, readsetIds));
@@ -786,7 +788,7 @@ private Set<Integer> appendTotal(StringBuilder sb, String tableName, String noun
786788
{
787789
SimpleFilter filter = new SimpleFilter(FieldKey.fromString(filterCol), keys, CompareType.IN);
788790
TableSelector ts = new TableSelector(SequenceAnalysisSchema.getInstance().getSchema().getTable(tableName), PageFlowUtil.set(pkCol), filter, null);
789-
Set<Integer> total = new HashSet<>(ts.getArrayList(Integer.class));
791+
Set<Integer> total = new IntHashSet(ts.getArrayList(Integer.class));
790792
sb.append("<br>" + total.size() + " " + noun);
791793

792794
return total;
@@ -1164,7 +1166,7 @@ public ApiResponse execute(ValidateReadsetImportForm form, BindException errors)
11641166

11651167
if (readsets1.length > 0 || readsets2.length > 0)
11661168
{
1167-
Set<Integer> ids = new HashSet<>();
1169+
Set<Integer> ids = new IntHashSet();
11681170
ids.addAll(Arrays.asList(readsets1));
11691171
ids.addAll(Arrays.asList(readsets2));
11701172

@@ -3888,7 +3890,7 @@ protected PipelineJob createOutputJob(RunSequenceHandlerForm form, Container tar
38883890

38893891
protected void validateGenomes(List<SequenceOutputFile> inputs, SequenceOutputHandler<?> handler) throws IllegalArgumentException
38903892
{
3891-
Set<Integer> genomes = new HashSet<>();
3893+
Set<Integer> genomes = new IntHashSet();
38923894
inputs.forEach(x -> {
38933895
if (x.getLibrary_id() == null && (handler.requiresGenome() || handler.requiresSingleGenome()))
38943896
{
@@ -4897,7 +4899,7 @@ public ApiResponse execute(OutputFilesForm form, BindException errors) throws Ex
48974899
{
48984900
Map<String, Object> resp = new HashMap<>();
48994901

4900-
Map<Integer, JSONObject> fileMap = new HashMap<>();
4902+
Map<Integer, JSONObject> fileMap = new IntHashMap<>();
49014903
for (Integer rowId : form.getOutputFileIds())
49024904
{
49034905
SequenceOutputFile f = SequenceOutputFile.getForId(rowId);

SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisMaintenanceTask.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import org.apache.commons.lang3.SystemUtils;
55
import org.apache.logging.log4j.Level;
66
import org.apache.logging.log4j.Logger;
7+
import org.labkey.api.collections.IntHashMap;
78
import org.labkey.api.data.CompareType;
89
import org.labkey.api.data.Container;
910
import org.labkey.api.data.ContainerManager;
@@ -42,7 +43,6 @@
4243
import java.util.Arrays;
4344
import java.util.Collections;
4445
import java.util.Date;
45-
import java.util.HashMap;
4646
import java.util.HashSet;
4747
import java.util.List;
4848
import java.util.Map;
@@ -101,7 +101,7 @@ public void run()
101101
{
102102
try
103103
{
104-
Map<Integer, File> genomeMap = new HashMap<>();
104+
Map<Integer, File> genomeMap = new IntHashMap<>();
105105
new TableSelector(SequenceAnalysisSchema.getInstance().getSchema().getTable(SequenceAnalysisSchema.TABLE_REF_LIBRARIES), PageFlowUtil.set("rowid", "fasta_file"), new SimpleFilter(FieldKey.fromString("datedisabled"), null, CompareType.ISBLANK), null).forEachResults(rs -> {
106106
int dataId = rs.getInt(FieldKey.fromString("fasta_file"));
107107
if (dataId > -1)

SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/CombineStarGeneCountsHandler.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package org.labkey.sequenceanalysis.analysis;
22

33
import org.json.JSONObject;
4+
import org.labkey.api.collections.IntHashMap;
45
import org.labkey.api.pipeline.PipelineJob;
56
import org.labkey.api.pipeline.PipelineJobException;
67
import org.labkey.api.pipeline.RecordedAction;
@@ -41,9 +42,9 @@ protected void processOutputFiles(CountResults results, List<SequenceOutputFile>
4142
long totalStrand2 = 0L;
4243

4344
results.distinctGenes.addAll(translator.getGeneMap().keySet());
44-
Map<Integer, Map<String, Double>> unstrandedCounts = new HashMap<>(inputFiles.size());
45-
Map<Integer, Map<String, Double>> strand1Counts = new HashMap<>(inputFiles.size());
46-
Map<Integer, Map<String, Double>> strand2Counts = new HashMap<>(inputFiles.size());
45+
Map<Integer, Map<String, Double>> unstrandedCounts = new IntHashMap<>(inputFiles.size());
46+
Map<Integer, Map<String, Double>> strand1Counts = new IntHashMap<>(inputFiles.size());
47+
Map<Integer, Map<String, Double>> strand2Counts = new IntHashMap<>(inputFiles.size());
4748

4849
for (SequenceOutputFile so : inputFiles)
4950
{

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/AlignmentImportTask.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ private List<AnalysisModel> parseAndCreateAnalyses() throws PipelineJobException
187187

188188
//process metrics
189189
Map<Long, Long> readsetToAnalysisMap = new LongHashMap<>();
190-
Map<Long, Map<PipelineStepOutput.PicardMetricsOutput.TYPE, File>> typeMap = new HashMap<>();
190+
Map<Long, Map<PipelineStepOutput.PicardMetricsOutput.TYPE, File>> typeMap = new LongHashMap<>();
191191
for (AnalysisModel model : ret)
192192
{
193193
readsetToAnalysisMap.put(model.getReadset(), model.getRowId());

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/CacheGenomeTrigger.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package org.labkey.sequenceanalysis.pipeline;
22

33
import org.apache.logging.log4j.Logger;
4+
import org.labkey.api.collections.IntHashMap;
45
import org.labkey.api.data.Container;
56
import org.labkey.api.module.ModuleLoader;
67
import org.labkey.api.pipeline.PipeRoot;
@@ -15,7 +16,6 @@
1516
import org.labkey.sequenceanalysis.SequenceAnalysisModule;
1617

1718
import java.io.File;
18-
import java.util.HashMap;
1919
import java.util.Map;
2020

2121
public class CacheGenomeTrigger implements GenomeTrigger
@@ -48,7 +48,7 @@ private void possiblyCache(Container c, User u, Logger log, int genomeId)
4848
{
4949
try
5050
{
51-
Map<Integer, File> genomeMap = new HashMap<>();
51+
Map<Integer, File> genomeMap = new IntHashMap<>();
5252
ReferenceGenome rg = SequenceAnalysisService.get().getReferenceGenome(genomeId, u);
5353
genomeMap.put(rg.getGenomeId(), rg.getSourceFastaFile());
5454
cacheGenomes(c, u, genomeMap, log, false);

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/ProcessVariantsHandler.java

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,9 @@
1616
import org.junit.Assert;
1717
import org.junit.Test;
1818
import org.labkey.api.collections.CaseInsensitiveHashSet;
19+
import org.labkey.api.collections.IntHashMap;
20+
import org.labkey.api.collections.IntHashSet;
21+
import org.labkey.api.collections.LongHashSet;
1922
import org.labkey.api.data.Container;
2023
import org.labkey.api.laboratory.DemographicsProvider;
2124
import org.labkey.api.module.Module;
@@ -160,7 +163,7 @@ public SequenceOutputFile createFinalSequenceOutput(PipelineJob job, File proces
160163

161164
public static SequenceOutputFile createSequenceOutput(PipelineJob job, File processed, List<SequenceOutputFile> inputFiles, String category)
162165
{
163-
Set<Integer> libraryIds = new HashSet<>();
166+
Set<Integer> libraryIds = new IntHashSet();
164167
inputFiles.forEach(x -> {
165168
if (x.getLibrary_id() != null)
166169
libraryIds.add(x.getLibrary_id());
@@ -171,7 +174,7 @@ public static SequenceOutputFile createSequenceOutput(PipelineJob job, File proc
171174
throw new IllegalArgumentException("No library ID defined for VCFs");
172175
}
173176

174-
Set<Long> readsetIds = new HashSet<>();
177+
Set<Long> readsetIds = new LongHashSet();
175178
inputFiles.forEach(x -> readsetIds.add(x.getReadset()));
176179

177180
int sampleCount;
@@ -598,7 +601,7 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
598601
throw new PipelineJobException("Priority order not supplied for VCFs");
599602
}
600603

601-
Set<Integer> genomes = new HashSet<>();
604+
Set<Integer> genomes = new IntHashSet();
602605
inputFiles.forEach(x -> genomes.add(x.getLibrary_id()));
603606

604607
if (genomes.size() != 1)
@@ -609,7 +612,7 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
609612
ReferenceGenome rg = ctx.getSequenceSupport().getCachedGenome(genomes.iterator().next());
610613
MergeVcfsAndGenotypesWrapper cv = new MergeVcfsAndGenotypesWrapper(ctx.getLogger());
611614

612-
Map<Integer, Long> fileMap = new HashMap<>();
615+
Map<Integer, Long> fileMap = new IntHashMap<>();
613616
inputFiles.forEach(x -> fileMap.put(x.getRowid(), x.getDataId()));
614617
String[] ids = priorityOrder.split(",");
615618

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/ReadsetCreationTask.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
import au.com.bytecode.opencsv.CSVReader;
1919
import org.jetbrains.annotations.NotNull;
2020
import org.labkey.api.collections.CaseInsensitiveHashMap;
21+
import org.labkey.api.collections.LongHashMap;
22+
import org.labkey.api.collections.LongHashSet;
2123
import org.labkey.api.data.CompareType;
2224
import org.labkey.api.data.Container;
2325
import org.labkey.api.data.DbSchema;
@@ -62,7 +64,6 @@
6264
import java.util.Collections;
6365
import java.util.Date;
6466
import java.util.HashMap;
65-
import java.util.HashSet;
6667
import java.util.List;
6768
import java.util.Map;
6869
import java.util.Set;
@@ -166,10 +167,10 @@ private void importReadsets() throws PipelineJobException
166167

167168
List<SequenceReadsetImpl> newReadsets = new ArrayList<>();
168169

169-
Set<Long> fileIdsWithExistingMetrics = new HashSet<>();
170+
Set<Long> fileIdsWithExistingMetrics = new LongHashSet();
170171
try (DbScope.Transaction transaction = schema.getScope().ensureTransaction())
171172
{
172-
Map<Long, String> readsetsToDeactivate = new HashMap<>();
173+
Map<Long, String> readsetsToDeactivate = new LongHashMap<>();
173174
TableInfo readsetTable = schema.getTable(SequenceAnalysisSchema.TABLE_READSETS);
174175
TableInfo readDataTable = schema.getTable(SequenceAnalysisSchema.TABLE_READ_DATA);
175176

SequenceAnalysis/src/org/labkey/sequenceanalysis/query/SequenceTriggerHelper.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
import org.biojava.nbio.core.sequence.transcription.TranscriptionEngine;
1414
import org.junit.Assert;
1515
import org.junit.Test;
16+
import org.labkey.api.collections.IntHashMap;
1617
import org.labkey.api.data.Container;
1718
import org.labkey.api.data.ContainerManager;
1819
import org.labkey.api.data.SimpleFilter;
@@ -34,7 +35,6 @@
3435
import java.io.IOException;
3536
import java.io.StringWriter;
3637
import java.util.Arrays;
37-
import java.util.HashMap;
3838
import java.util.List;
3939
import java.util.Map;
4040

@@ -50,7 +50,7 @@ public class SequenceTriggerHelper
5050

5151
private static final TranscriptionEngine _engine = new TranscriptionEngine.Builder().dnaCompounds(AmbiguityDNACompoundSet.getDNACompoundSet()).rnaCompounds(AmbiguityRNACompoundSet.getRNACompoundSet()).initMet(false).trimStop(false).build();
5252

53-
private final Map<Integer, String> _sequenceMap = new HashMap<>();
53+
private final Map<Integer, String> _sequenceMap = new IntHashMap<>();
5454

5555
public SequenceTriggerHelper(int userId, String containerId)
5656
{

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/BamHaplotyper.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
import org.apache.logging.log4j.LogManager;
1414
import org.json.JSONArray;
1515
import org.json.JSONObject;
16+
import org.labkey.api.collections.IntHashMap;
17+
import org.labkey.api.collections.StringHashMap;
1618
import org.labkey.api.data.CompareType;
1719
import org.labkey.api.data.Container;
1820
import org.labkey.api.data.ContainerManager;
@@ -37,7 +39,6 @@
3739
import java.io.IOException;
3840
import java.util.ArrayList;
3941
import java.util.Arrays;
40-
import java.util.HashMap;
4142
import java.util.List;
4243
import java.util.Map;
4344
import java.util.Set;
@@ -119,7 +120,7 @@ public JSONObject calculateNTHaplotypes(int[] inputFileIds, String[] regions, in
119120

120121
if (!found)
121122
{
122-
Map<Integer, Integer> map = new HashMap<>();
123+
Map<Integer, Integer> map = new IntHashMap<>();
123124
map.put(so.getRowid(), newPair.second);
124125
combinedResults.add(Pair.of(newPair.first, map));
125126
}
@@ -162,7 +163,7 @@ private String getReferenceSequence(ReferenceSequence ref, Map<Integer, TreeSet<
162163
private Map<Integer, TreeSet<Integer>> getInsertionMap(List<Pair<Character[][], Map<Integer, Integer>>> combinedResults)
163164
{
164165
//build list of all insertions that are present
165-
Map<Integer, TreeSet<Integer>> indels = new HashMap<>();
166+
Map<Integer, TreeSet<Integer>> indels = new IntHashMap<>();
166167
for (Pair<Character[][], Map<Integer, Integer>> pair : combinedResults)
167168
{
168169
for (int idx = 0;idx < pair.first.length;idx++)
@@ -189,7 +190,7 @@ private Map<Integer, TreeSet<Integer>> getInsertionMap(List<Pair<Character[][],
189190
private Map<String, Map<Integer, Integer>> convertResults(List<Pair<Character[][], Map<Integer, Integer>>> combinedResults, byte[] refBases, Map<Integer, TreeSet<Integer>> indels)
190191
{
191192
//now iterate each array, convert to string and return results
192-
Map<String, Map<Integer, Integer>> ret = new HashMap<>();
193+
Map<String, Map<Integer, Integer>> ret = new StringHashMap<>();
193194
for (Pair<Character[][], Map<Integer, Integer>> pair : combinedResults)
194195
{
195196
StringBuilder sb = new StringBuilder();

0 commit comments

Comments (0)