4 changes: 3 additions & 1 deletion .babelrc
@@ -2,6 +2,8 @@
   "presets": [
     "@babel/preset-env",
     "@babel/preset-typescript",
-    "@babel/preset-react"
+    ["@babel/preset-react", {
+      "runtime": "automatic"
+    }]
   ]
 }
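Reviewer note: with the automatic JSX runtime, Babel injects the JSX helpers itself, so component files no longer need to import React just for JSX. A minimal sketch of what this permits (hypothetical component, not part of this PR):

```js
// Hypothetical component — with {"runtime": "automatic"} no `import React` is needed for JSX
export default function Hello({name}) {
	return <p>Hello, {name}</p>;
}
```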
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "consort-frontend",
-  "version": "0.19.0",
+  "version": "0.20.0",
   "description": "",
   "engines": {
     "npm": ">=10",
9 changes: 8 additions & 1 deletion server/app.js
@@ -39,7 +39,14 @@ app.use(session({
   secret: 'keyboard cat',
   resave: false, // don't save session if unmodified
   saveUninitialized: false, // don't create session until something stored
-  store: new SQLiteStore({ db: 'sessions.db', dir: './var/db' })
+  store: new SQLiteStore({ db: 'sessions.db', dir: './var/db' }),
+  rolling: true, // Reset session expiration on each request
+  cookie: {
+    maxAge: 24 * 60 * 60 * 1000, // 1 day in milliseconds
+    httpOnly: true,
+    secure: process.env.NODE_ENV === 'production', // Use secure cookies in production
+    sameSite: 'lax'
+  }
 }));
 
 // CORS middleware for all /api/* routes - must be before authentication
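Reviewer note: with `rolling: true`, express-session re-issues the cookie on every response, so the 24-hour `maxAge` counts from the most recent request rather than from login. A minimal standalone sketch of that behavior (route and session field are illustrative assumptions, not from this PR):

```js
const express = require("express");
const session = require("express-session");

const app = express();
app.use(session({
  secret: "keyboard cat",
  resave: false,
  saveUninitialized: false,
  rolling: true, // every response refreshes the cookie's Max-Age
  cookie: {maxAge: 24 * 60 * 60 * 1000, httpOnly: true, sameSite: "lax"}
}));

// each hit re-issues Set-Cookie, pushing expiry 24h past the latest request
app.get("/ping", (req, res) => {
  req.session.lastSeen = Date.now(); // touching the session keeps it alive
  res.json({lastSeen: req.session.lastSeen});
});

app.listen(3000);
```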
33 changes: 15 additions & 18 deletions src/actions/client.js
@@ -2,14 +2,11 @@
 
 import {
 	createEmptyDatasetRequest,
-	getDatasetMetadataLoop,
-	getFileInDataset,
 	uploadFileToDatasetRequest
 } from "../utils/dataset";
-import config from "../app.config";
 import {wordPipeline} from "../utils/word_pipeline";
 import {pdfPipeline} from "../utils/pdf_pipeline";
-import {SET_EXTRACTION_STATUS, setExtractionStatus} from "./file";
+import {setExtractionStatus} from "./file";
 import {ADD_FILE_TO_DATASET, addFileToDataset, CREATE_DATASETS, createDataset} from "./dataset";
 import {resetFileToDefault} from "./file";
 import {resetDatasetToDefault} from "./dataset";
@@ -22,43 +19,44 @@ import {resetUserCategoryToDefault} from "./dashboard";
 export function createUploadExtract(file, config) {
 	return async function createUploadExtractThunk(dispatch) {
 		// this function creates an empty dataset. uploads the file to the dataset and submits for extraction
-		console.log("StatementType", config.statementType)
-		console.log("UserCategory", config.userCategory)
+		// console.log("StatementType", config.statementType)
+		// console.log("UserCategory", config.userCategory)
+		console.log("Config", config);
 		// Clowder API call to create empty dataset
 		const file_name = file.name.replace(/\.[^/.]+$/, ""); // get filename without extension as dataset name
 		const file_description = file.type;
-		console.log("Uploading file", file_name);
+		// console.log("Uploading file", file_name);
 		const dataset_json = await createEmptyDatasetRequest(file_name, file_description); // returns the dataset ID {id:xxx}
 		if (dataset_json !== undefined && dataset_json !== null) {
 			dispatch(createDataset(CREATE_DATASETS, dataset_json));
 			// upload input file to dataset
-			let file_json = await uploadFileToDatasetRequest(dataset_json.id, file); // return file ID. {id:xxx} OR {ids:[{id:xxx}, {id:xxx}]}
+			const file_json = await uploadFileToDatasetRequest(dataset_json.id, file); // return file ID. {id:xxx} OR {ids:[{id:xxx}, {id:xxx}]}
 			if (file_json !== undefined){
 				file_json["filename"] = file.name;
 				// submit uploaded file for extraction
 				dispatch(setExtractionStatus("Analyzing file"));
-				if (file.type == "application/vnd.openxmlformats-officedocument.wordprocessingml.document" || file.type =="application/msword"){
+				if (file.type === "application/vnd.openxmlformats-officedocument.wordprocessingml.document" || file.type === "application/msword"){
 					const word_pipeline_status = await wordPipeline(file_json, dataset_json, config, dispatch);
 					if (word_pipeline_status) {
-						console.log("Analysis complete");
+						// console.log("Analysis complete");
 						dispatch(setExtractionStatus("Analysis complete"));
 
 					}
 					else {
-						console.error("Analysis failed");
+						// console.error("Analysis failed");
 						dispatch(setExtractionStatus("Analysis failed"));
 					}
 
 				}
-				else if (file.type == "application/pdf") {
+				else if (file.type === "application/pdf") {
 					const pdf_pipeline_status = await pdfPipeline(file_json, dataset_json, config, dispatch);
 					if (pdf_pipeline_status) {
-						console.log("Analysis complete.");
+						// console.log("Analysis complete.");
 						dispatch(setExtractionStatus("Analysis complete"));
 
 					}
 					else {
-						console.error("Analysis failed");
+						// console.error("Analysis failed");
 						dispatch(setExtractionStatus("Analysis failed"));
 					}
 
Expand All @@ -68,19 +66,18 @@ export function createUploadExtract(file, config) {
}
else {
// TODO add error action
console.error("Error in file type");
// console.error("Error in file type");
dispatch(setExtractionStatus("Error in file type"));
}
// after submitting uploaded file for extraction, add the file to dataset state
dispatch(addFileToDataset(ADD_FILE_TO_DATASET, file_json));
}
else {
console.error("Error in clowder upload of file ", file.name)
dispatch(setExtractionStatus("Error in clowder upload of file " + file.name));
dispatch(setExtractionStatus(`Error in clowder upload of file ${file.name}`));
}
}
else {
console.error("Error in dataset creation");
// console.error("Error in dataset creation");
dispatch(setExtractionStatus("Error in dataset creation"));
dispatch(resetFileToDefault());
dispatch(resetDatasetToDefault());
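Reviewer note: after this cleanup the thunk reports progress exclusively through `setExtractionStatus`, keeping the UI as the single consumer of status text instead of the console. A hypothetical dispatch site (component name and props are assumptions, not part of this PR):

```js
import {useDispatch} from "react-redux";
import {createUploadExtract} from "../actions/client";

// Hypothetical uploader — wires a file input to the createUploadExtract thunk
export default function UploadInput({clientConfig}) {
	const dispatch = useDispatch();
	return (
		<input
			type="file"
			onChange={(event) => {
				const file = event.target.files[0];
				if (file) dispatch(createUploadExtract(file, clientConfig));
			}}
		/>
	);
}
```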
8 changes: 4 additions & 4 deletions src/actions/dashboard.js
@@ -64,9 +64,9 @@ export function checkAuthenticationStatus() {
 	return async (dispatch) => {
 		dispatch(setAuthenticationLoading(true));
 		try {
-			const response = await fetch('/isAuthenticated', {
-				method: 'GET',
-				credentials: 'include',
+			const response = await fetch("/isAuthenticated", {
+				method: "GET",
+				credentials: "include",
 			});
 			const data = await response.json();
 			dispatch(setAuthenticationStatus(data.isAuthenticated));
@@ -78,7 +78,7 @@ dispatch(setUserCategory(SET_USER_CATEGORY, "author"));
 				dispatch(setUserCategory(SET_USER_CATEGORY, "author"));
 			}
 		} catch (error) {
-			console.error('Error checking authentication status:', error);
+			// console.error("Error in checking authentication status", error);
 			dispatch(setAuthenticationStatus(false));
 			dispatch(setUserCategory(SET_USER_CATEGORY, "author"));
 		} finally {
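For context, this thunk expects `/isAuthenticated` to answer `{isAuthenticated: boolean}` with the session cookie sent along (`credentials: "include"`). A hedged sketch of what such an endpoint could look like in server/app.js (the actual route is not in this diff, and the session field checked is an assumption):

```js
// Hypothetical endpoint shape — adjust to however server/app.js tracks the logged-in user
app.get("/isAuthenticated", (req, res) => {
	res.json({isAuthenticated: Boolean(req.session && req.session.user)});
});
```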
82 changes: 39 additions & 43 deletions src/actions/dataset.js
@@ -1,7 +1,7 @@
 // dataset actions
 
 import {getHeader} from "../utils/common";
-import {createEmptyDatasetRequest, getDatasetsRequest} from "../utils/dataset";
+import {getDatasetsRequest} from "../utils/dataset";
 
 // receive datasets action
 export const RECEIVE_DATASETS = "RECEIVE_DATASETS";
@@ -29,9 +29,9 @@ export const DELETE_DATASET = "DELETE_DATASET";
 // update dataset status action
 export const UPDATE_DATASET_STATUS = "UPDATE_DATASET_STATUS";
 export const updateDatasetStatus = (datasetId, status) => ({
-  type: UPDATE_DATASET_STATUS,
-  datasetId,
-  status
+	type: UPDATE_DATASET_STATUS,
+	datasetId,
+	status
 });
 
 // fetchDatasets thunk function
@@ -43,34 +43,34 @@ export const fetchDatasets = (title = null, limit="5") => async dispatch => {
 };
 
 export function fetchFilesInDataset(id) {
-	let url = `/api/datasets/${id}/files`;
+	const url = `/api/datasets/${id}/files`;
 	return (dispatch) => {
 		return fetch(url, {mode: "cors"})
-		.then((response) => {
-			if (response.status === 200) {
-				response.json().then(json => {
-					dispatch(receiveFilesInDataset(RECEIVE_FILES_IN_DATASET, json));
-				});
-			} else {
-				dispatch(receiveFilesInDataset(RECEIVE_FILES_IN_DATASET, []));
-			}
-		});
+			.then((response) => {
+				if (response.status === 200) {
+					response.json().then(json => {
+						dispatch(receiveFilesInDataset(RECEIVE_FILES_IN_DATASET, json));
+					});
+				} else {
+					dispatch(receiveFilesInDataset(RECEIVE_FILES_IN_DATASET, []));
+				}
+			});
 	};
 }
 
 export function fetchDatasetAbout(id) {
-	let url = `/api/datasets/${id}`;
+	const url = `/api/datasets/${id}`;
 	return (dispatch) => {
 		return fetch(url, {mode: "cors"})
-		.then((response) => {
-			if (response.status === 200) {
-				response.json().then(json => {
-					dispatch(receiveDatasetAbout(RECEIVE_DATASET_ABOUT, json));
-				});
-			} else {
-				dispatch(receiveDatasetAbout(RECEIVE_DATASET_ABOUT, []));
-			}
-		});
+			.then((response) => {
+				if (response.status === 200) {
+					response.json().then(json => {
+						dispatch(receiveDatasetAbout(RECEIVE_DATASET_ABOUT, json));
+					});
+				} else {
+					dispatch(receiveDatasetAbout(RECEIVE_DATASET_ABOUT, []));
+				}
+			});
 	};
 }
 
@@ -86,10 +86,10 @@ export function setDatasetMetadata(type, json) {
 }
 
 export function postDatasetMetadata(id, metadata) {
-	let url = `/api/datasets/${id}/metadata.jsonld`;
-	let authHeader = getHeader();
-	authHeader.append('Accept', 'application/json');
-	authHeader.append('Content-Type', 'application/json');
+	const url = `/api/datasets/${id}/metadata.jsonld`;
+	const authHeader = getHeader();
+	authHeader.append("Accept", "application/json");
+	authHeader.append("Content-Type", "application/json");
 	const body = JSON.stringify(metadata);
 	return (dispatch) => {
 		return fetch(url, {method:"POST", mode: "cors", headers: authHeader, body:body})
@@ -104,24 +104,20 @@
 }
 
 export function deleteDataset(datasetId) {
-	let url = `/api/datasets/${datasetId}?superAdmin=true`;
+	const url = `/api/datasets/${datasetId}?superAdmin=true`;
 	return (dispatch) => {
 		return fetch(url, {mode: "cors", method: "DELETE"})
-		.then((response) => {
-			if (response.status === 200) {
-				response.json().then(json => {
-					dispatch({
-						type: DELETE_DATASET,
-						dataset: {"id": datasetId},
-						receivedAt: Date.now(),
+			.then((response) => {
+				if (response.status === 200) {
+					response.json().then(() => {
+						dispatch({
+							type: DELETE_DATASET,
+							dataset: {"id": datasetId},
+							receivedAt: Date.now(),
+						});
+					});
+				}
-			} else {
-				response.json().then(json => {
-					console.error("Failed to delete dataset:", json);
-				});
-			}
-		});
-	}
 			});
 	};
 }
 
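Reviewer note: `updateDatasetStatus` carries `datasetId` and `status` on the action, so a reducer can patch a single dataset in place. A hypothetical reducer branch consuming it (the real reducer is not part of this diff, and the state shape is an assumption):

```js
import {UPDATE_DATASET_STATUS} from "../actions/dataset";

// Hypothetical shape — assumes state.datasets is an array of {id, status, ...}
const initialState = {datasets: []};

export function datasetsReducer(state = initialState, action) {
	switch (action.type) {
		case UPDATE_DATASET_STATUS:
			return {
				...state,
				datasets: state.datasets.map((d) =>
					d.id === action.datasetId ? {...d, status: action.status} : d
				)
			};
		default:
			return state;
	}
}
```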