Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ import {
GridToolbarExport
} from '@mui/x-data-grid';
import SearchIcon from '@mui/icons-material/Search';
import LinkIcon from '@mui/icons-material/Link';
import DownloadIcon from '@mui/icons-material/Download'
import { ActionURL } from '@labkey/api';
import React, { useEffect, useState } from 'react';
import { getConf } from '@jbrowse/core/configuration';
import { AppBar, Box, Button, Dialog, Paper, Popover, Toolbar, Tooltip, Typography } from '@mui/material';
Expand Down Expand Up @@ -92,6 +95,27 @@ const VariantTableWidget = observer(props => {
fetchLuceneQuery(passedFilters, sessionId, trackGUID, page, pageSize, field, sort, (json)=>{handleSearch(json)}, (error) => {setDataLoaded(true); setError(error)});
}

const handleExport = () => {
    // Serialize the active filter state into the encoded search string the
    // lucene export action expects.
    const searchString = createEncodedFilterString(filters, true);

    // Mirror the grid's current sort; default to genomicPosition ascending.
    // MUI's sort value is 'asc' | 'desc' | null, so only 'desc' means reversed.
    const sortField = sortModel[0]?.field ?? 'genomicPosition';
    const sortReverse = sortModel[0]?.sort === 'desc';

    const rawUrl = ActionURL.buildURL('jbrowse', 'luceneCSVExport.api');
    const exportUrl = new URL(rawUrl, window.location.origin);

    exportUrl.searchParams.set('sessionId', sessionId);
    exportUrl.searchParams.set('trackId', trackGUID);
    exportUrl.searchParams.set('searchString', searchString);
    exportUrl.searchParams.set('sortField', sortField);
    exportUrl.searchParams.set('sortReverse', sortReverse.toString());

    // Navigate to the action URL so the browser handles the CSV attachment download.
    window.location.href = exportUrl.toString();
};

const TableCellWithPopover = (props: { value: any }) => {
const { value } = props;
const fullDisplayValue = value ? (Array.isArray(value) ? value.join(', ') : value) : ''
Expand Down Expand Up @@ -184,7 +208,7 @@ const VariantTableWidget = observer(props => {
);
}

function CustomToolbar({ setFilterModalOpen }) {
function CustomToolbar({ setFilterModalOpen, handleExport }) {
return (
<GridToolbarContainer>
<GridToolbarColumnsButton />
Expand All @@ -197,17 +221,25 @@ const VariantTableWidget = observer(props => {
Search
</Button>
<GridToolbarDensitySelector />
<GridToolbarExport csvOptions={{
delimiter: ';',
}} />

<Button
startIcon={<DownloadIcon />}
color="primary"
onClick={handleExport}
>
Export CSV
</Button>

<ShareButton />
</GridToolbarContainer>
);
}

const ToolbarWithProps = () => (
<CustomToolbar setFilterModalOpen={setFilterModalOpen} />
<CustomToolbar
setFilterModalOpen={setFilterModalOpen}
handleExport={handleExport}
/>
);

const handleOffsetChange = (newOffset: number) => {
Expand Down
61 changes: 60 additions & 1 deletion jbrowse/src/org/labkey/jbrowse/JBrowseController.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
Expand Down Expand Up @@ -78,6 +79,8 @@
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
Expand Down Expand Up @@ -891,6 +894,55 @@ public void validateForm(ResolveVcfFieldsForm form, Errors errors)
}
}

@RequiresPermission(ReadPermission.class)
public static class LuceneCSVExportAction extends ReadOnlyApiAction<LuceneQueryForm>
{
@Override
public ApiResponse execute(LuceneQueryForm form, BindException errors)
{
try
{
JBrowseLuceneSearch searcher = JBrowseLuceneSearch.create(form.getSessionId(), form.getTrackId(), getUser());
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");
String timestamp = LocalDateTime.now().format(formatter);
String filename = "mGAP_results_" + timestamp + ".csv";

HttpServletResponse response = getViewContext().getResponse();
response.setContentType("text/csv");
response.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");

searcher.doSearchCSV(
getUser(),
PageFlowUtil.decode(form.getSearchString()),
form.getSortField(),
form.getSortReverse(),
response
);

return null;
}
catch (Exception e)
{
_log.error("Error in JBrowse lucene query", e);
errors.reject(ERROR_MSG, e.getMessage());
return null;
}
}

@Override
public void validateForm(LuceneQueryForm form, Errors errors)
{
if ((form.getSearchString() == null || form.getSessionId() == null || form.getTrackId() == null))
{
errors.reject(ERROR_MSG, "Must provide search string, track ID, and the JBrowse session ID");
}
else if (!isValidUUID(form.getTrackId()))
{
errors.reject(ERROR_MSG, "Invalid track ID: " + form.getTrackId());
}
}
}

@RequiresPermission(ReadPermission.class)
public static class LuceneQueryAction extends ReadOnlyApiAction<LuceneQueryForm>
{
Expand All @@ -910,7 +962,14 @@ public ApiResponse execute(LuceneQueryForm form, BindException errors)

try
{
return new ApiSimpleResponse(searcher.doSearch(getUser(), PageFlowUtil.decode(form.getSearchString()), form.getPageSize(), form.getOffset(), form.getSortField(), form.getSortReverse()));
return new ApiSimpleResponse(searcher.doSearchJSON(
getUser(),
PageFlowUtil.decode(form.getSearchString()),
form.getPageSize(),
form.getOffset(),
form.getSortField(),
form.getSortReverse()
));
}
catch (Exception e)
{
Expand Down
95 changes: 82 additions & 13 deletions jbrowse/src/org/labkey/jbrowse/JBrowseLuceneSearch.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.labkey.jbrowse;

import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
Expand All @@ -20,6 +21,7 @@
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
Expand Down Expand Up @@ -50,6 +52,7 @@

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
Expand Down Expand Up @@ -189,7 +192,17 @@ public String extractFieldName(String queryString)
return parts.length > 0 ? parts[0].trim() : null;
}

public JSONObject doSearch(User u, String searchString, final int pageSize, final int offset, String sortField, boolean sortReverse) throws IOException, ParseException
public JSONObject doSearchJSON(User u, String searchString, final int pageSize, final int offset, String sortField, boolean sortReverse) throws IOException, ParseException {
SearchConfig searchConfig = createSearchConfig(u, searchString, pageSize, offset, sortField, sortReverse);
return paginateJSON(searchConfig);
}

public void doSearchCSV(User u, String searchString, String sortField, boolean sortReverse, HttpServletResponse response) throws IOException, ParseException {
SearchConfig searchConfig = createSearchConfig(u, searchString, 0, 0, sortField, sortReverse);
exportCSV(searchConfig, response);
}

private SearchConfig createSearchConfig(User u, String searchString, final int pageSize, final int offset, String sortField, boolean sortReverse) throws IOException, ParseException
{
searchString = tryUrlDecode(searchString);
File indexPath = _jsonFile.getExpectedLocationOfLuceneIndex(true);
Expand All @@ -199,6 +212,7 @@ public JSONObject doSearch(User u, String searchString, final int pageSize, fina
Analyzer analyzer = new StandardAnalyzer();

List<String> stringQueryParserFields = new ArrayList<>();
List<String> fieldsList = new ArrayList<>();
Map<String, SortField.Type> numericQueryParserFields = new HashMap<>();
PointsConfig intPointsConfig = new PointsConfig(new DecimalFormat(), Integer.class);
PointsConfig doublePointsConfig = new PointsConfig(new DecimalFormat(), Double.class);
Expand All @@ -208,6 +222,7 @@ public JSONObject doSearch(User u, String searchString, final int pageSize, fina
for (Map.Entry<String, JBrowseFieldDescriptor> entry : fields.entrySet())
{
String field = entry.getKey();
fieldsList.add(field);
JBrowseFieldDescriptor descriptor = entry.getValue();

switch(descriptor.getType())
Expand Down Expand Up @@ -267,14 +282,14 @@ else if (numericQueryParserFields.containsKey(fieldName))
}
catch (QueryNodeException e)
{
_log.error("Unable to parse query for field " + fieldName + ": " + queryString, e);
_log.error("Unable to parse query for field {}: {}", fieldName, queryString, e);

throw new IllegalArgumentException("Unable to parse query: " + queryString + " for field: " + fieldName);
}
}
else
{
_log.error("No such field(s), or malformed query: " + queryString + ", field: " + fieldName);
_log.error("No such field(s), or malformed query: {}, field: {}", queryString, fieldName);

throw new IllegalArgumentException("No such field(s), or malformed query: " + queryString + ", field: " + fieldName);
}
Expand Down Expand Up @@ -302,43 +317,79 @@ else if (numericQueryParserFields.containsKey(fieldName))
sort = new Sort(new SortField(sortField + "_sort", fieldType, sortReverse));
}

return new SearchConfig(cacheEntry, query, pageSize, offset, sort, fieldsList);
}

private JSONObject paginateJSON(SearchConfig c) throws IOException, ParseException {
// Get chunks of size {pageSize}. Default to 1 chunk -- add to the offset to get more.
// We then iterate over the range of documents we want based on the offset. This does grow in memory
// linearly with the number of documents, but my understanding is that these are just score,id pairs
// rather than full documents, so mem usage *should* still be pretty low.
// Perform the search with sorting
TopFieldDocs topDocs = cacheEntry.indexSearcher.search(query, pageSize * (offset + 1), sort);

TopFieldDocs topDocs = c.cacheEntry.indexSearcher.search(c.query, c.pageSize * (c.offset + 1), c.sort);
JSONObject results = new JSONObject();

// Iterate over the doc list, (either to the total end or until the page ends) grab the requested docs,
// and add to returned results
List<JSONObject> data = new ArrayList<>();
for (int i = pageSize * offset; i < Math.min(pageSize * (offset + 1), topDocs.scoreDocs.length); i++)
for (int i = c.pageSize * c.offset; i < Math.min(c.pageSize * (c.offset + 1), topDocs.scoreDocs.length); i++)
{
JSONObject elem = new JSONObject();
Document doc = cacheEntry.indexSearcher.storedFields().document(topDocs.scoreDocs[i].doc);
Document doc = c.cacheEntry.indexSearcher.storedFields().document(topDocs.scoreDocs[i].doc);

for (IndexableField field : doc.getFields()) {
for (IndexableField field : doc.getFields())
{
String fieldName = field.name();
String[] fieldValues = doc.getValues(fieldName);
if (fieldValues.length > 1) {
if (fieldValues.length > 1)
{
elem.put(fieldName, fieldValues);
} else {
}
else
{
elem.put(fieldName, fieldValues[0]);
}
}

data.add(elem);
}

results.put("data", data);
results.put("totalHits", topDocs.totalHits.value);

//TODO: we should probably stream this
return results;
}

private void exportCSV(SearchConfig c, HttpServletResponse response) throws IOException
{
PrintWriter writer = response.getWriter();
IndexSearcher searcher = c.cacheEntry.indexSearcher;
TopFieldDocs topDocs = searcher.search(c.query, Integer.MAX_VALUE, c.sort);

writer.println(String.join(",", c.fields));

for (ScoreDoc scoreDoc : topDocs.scoreDocs)
{
Document doc = searcher.storedFields().document(scoreDoc.doc);
List<String> rowValues = new ArrayList<>();

for (String fieldName : c.fields)
{
String[] values = doc.getValues(fieldName);
String value = values.length > 0
? String.join(",", values)
: "";

// Escape strings
value = "\"" + value.replace("\"", "\"\"") + "\"";
rowValues.add(value);
}

writer.println(String.join(",", rowValues));
}

writer.flush();
}

public static class DefaultJBrowseFieldCustomizer extends AbstractJBrowseFieldCustomizer
{
public DefaultJBrowseFieldCustomizer()
Expand Down Expand Up @@ -583,7 +634,7 @@ public void cacheDefaultQuery()
try
{
JBrowseLuceneSearch.clearCache(_jsonFile.getObjectId());
doSearch(_user, ALL_DOCS, 100, 0, GENOMIC_POSITION, false);
doSearchJSON(_user, ALL_DOCS, 100, 0, GENOMIC_POSITION, false);
}
catch (ParseException | IOException e)
{
Expand Down Expand Up @@ -641,4 +692,22 @@ public void shutdownStarted()
JBrowseLuceneSearch.emptyCache();
}
}

private class SearchConfig {
CacheEntry cacheEntry;
Query query;
int pageSize;
int offset;
Sort sort;
List<String> fields;

public SearchConfig(CacheEntry cacheEntry, Query query, int pageSize, int offset, Sort sort, List<String> fields) {
this.cacheEntry = cacheEntry;
this.query = query;
this.pageSize = pageSize;
this.offset = offset;
this.sort = sort;
this.fields = fields;
}
}
}
Loading