129 changes: 0 additions & 129 deletions ArunTest.py

This file was deleted.

5 changes: 3 additions & 2 deletions README.md
@@ -2,11 +2,12 @@

![License](https://img.shields.io/badge/license-MIT-blue.svg)
![PyPI](https://img.shields.io/pypi/v/bioneuralnet)
![Python Versions](https://img.shields.io/pypi/pyversions/bioneuralnet)
![GitHub Issues](https://img.shields.io/github/issues/UCD-BDLab/BioNeuralNet)
![GitHub Contributors](https://img.shields.io/github/contributors/UCD-BDLab/BioNeuralNet)
![Downloads](https://static.pepy.tech/badge/bioneuralnet)

## Welcome to BioNeuralNet Beta 0.1

## Welcome to [BioNeuralNet Beta 0.1](https://bioneuralnet.readthedocs.io/en/latest/index.html)

![BioNeuralNet Logo](/assets/LOGO_WB.png)

42 changes: 21 additions & 21 deletions bioneuralnet/downstream_task/dpmon.py
@@ -33,7 +33,7 @@ def __init__(
omics_list: List[pd.DataFrame],
phenotype_data: pd.DataFrame,
clinical_data: pd.DataFrame,
model: str = "GCN",
model: str = "GAT",
gnn_hidden_dim: int = 16,
layer_num: int = 5,
nn_hidden_dim1: int = 8,
@@ -55,10 +55,10 @@
if (
phenotype_data is None
or phenotype_data.empty
or "finalgold_visit" not in phenotype_data.columns
or "phenotype" not in phenotype_data.columns
):
raise ValueError(
"Phenotype data must contain 'finalgold_visit' and cannot be empty."
"Phenotype data must contain 'phenotype' and cannot be empty."
)

if not omics_list or any(df.empty for df in omics_list):
@@ -84,7 +84,6 @@ def __init__(
self.gpu = gpu
self.cuda = cuda
self.output_dir = output_dir if output_dir else f"dpmon_output"
# f"dpmon_output_{os.getpid()}" for unique output directory

os.makedirs(self.output_dir, exist_ok=True)
logger.info("Initialized DPMON with the provided parameters.")
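The constructor now defaults to the GAT model and validates that the phenotype table carries a `phenotype` column (previously `finalgold_visit`). A minimal sketch of inputs that would satisfy this check, using synthetic data; all sample and feature names are illustrative, and the full DPMON constructor call is omitted because not all of its parameters are visible in this hunk.

```python
import pandas as pd

# Synthetic samples shared across all tables (names illustrative).
samples = ["S1", "S2", "S3", "S4"]

# Each omics table: samples as rows, features as columns.
omics_list = [
    pd.DataFrame({"gene_1": [0.1, 0.4, 0.3, 0.9],
                  "gene_2": [1.2, 0.8, 0.5, 0.7]}, index=samples),
    pd.DataFrame({"protein_1": [2.0, 1.5, 1.1, 0.3]}, index=samples),
]

# The validation above requires a non-empty table with a 'phenotype' column.
phenotype_data = pd.DataFrame({"phenotype": [0, 1, 0, 1]}, index=samples)

clinical_data = pd.DataFrame({"age": [54, 61, 47, 70]}, index=samples)
```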
@@ -96,7 +95,7 @@ def run(self) -> pd.DataFrame:
**Steps:**

1. **Combining Omics and Phenotype Data**:
- Merges the provided omics datasets and ensures that the phenotype (`finalgold_visit`) column is included.
- Merges the provided omics datasets and ensures that the phenotype (`phenotype`) column is included.

2. **Tuning or Training**:
- **Tuning**: If `tune=True`, performs hyperparameter tuning using Ray Tune and returns an empty DataFrame.
@@ -145,10 +144,11 @@ def run(self) -> pd.DataFrame:
"tune": self.tune,
}

# Combine omics datasets
combined_omics = pd.concat(self.omics_list, axis=1)
if "finalgold_visit" not in combined_omics.columns:
if "phenotype" not in combined_omics.columns:
combined_omics = combined_omics.merge(
self.phenotype_data[["finalgold_visit"]],
self.phenotype_data[["phenotype"]],
left_index=True,
right_index=True,
)
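The comment and merge added here follow a simple pandas pattern: concatenate the omics tables column-wise, then pull the `phenotype` column in from the phenotype table on the shared sample index. A self-contained sketch of that step (data and names illustrative):

```python
import pandas as pd

samples = ["S1", "S2", "S3"]
omics_list = [
    pd.DataFrame({"gene_1": [0.1, 0.4, 0.3]}, index=samples),
    pd.DataFrame({"protein_1": [2.0, 1.5, 1.1]}, index=samples),
]
phenotype_data = pd.DataFrame({"phenotype": [0, 1, 0]}, index=samples)

# Column-wise concatenation of all omics tables (samples stay on the index).
combined_omics = pd.concat(omics_list, axis=1)

# Attach the phenotype column by index, as run() does when it is missing.
if "phenotype" not in combined_omics.columns:
    combined_omics = combined_omics.merge(
        phenotype_data[["phenotype"]], left_index=True, right_index=True
    )

print(combined_omics.columns.tolist())  # ['gene_1', 'protein_1', 'phenotype']
```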
@@ -208,7 +208,7 @@ def slice_omics_datasets(
logger.error(f"Nodes missing in omics data: {missing_nodes}")
raise ValueError("Missing nodes in omics dataset.")

selected_columns = omics_network_nodes_names + ["finalgold_visit"]
selected_columns = omics_network_nodes_names + ["phenotype"]
return [omics_dataset[selected_columns]]
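`slice_omics_datasets` keeps only the columns that correspond to network nodes, plus the renamed `phenotype` label column. A small stand-alone sketch of that selection and the missing-node check (data illustrative):

```python
import pandas as pd

omics_dataset = pd.DataFrame(
    {"gene_1": [0.1, 0.4], "gene_2": [1.2, 0.8], "phenotype": [0, 1]},
    index=["S1", "S2"],
)
omics_network_nodes_names = ["gene_1", "gene_2"]

# Fail early if the network references features absent from the omics table.
missing_nodes = [n for n in omics_network_nodes_names if n not in omics_dataset.columns]
if missing_nodes:
    raise ValueError(f"Missing nodes in omics dataset: {missing_nodes}")

# Keep network-node columns plus the phenotype label column.
selected_columns = omics_network_nodes_names + ["phenotype"]
sliced = omics_dataset[selected_columns]
```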


@@ -288,10 +288,10 @@ def run_standard_training(
gnn_hidden_dim=dpmon_params["gnn_hidden_dim"],
gnn_layer_num=dpmon_params["layer_num"],
ae_encoding_dim=1,
nn_input_dim=omics_data.drop(["finalgold_visit"], axis=1).shape[1],
nn_input_dim=omics_data.drop(["phenotype"], axis=1).shape[1],
nn_hidden_dim1=dpmon_params["nn_hidden_dim1"],
nn_hidden_dim2=dpmon_params["nn_hidden_dim2"],
nn_output_dim=omics_data["finalgold_visit"].nunique(),
nn_output_dim=omics_data["phenotype"].nunique(),
).to(device)

criterion = nn.CrossEntropyLoss()
)

train_features = torch.FloatTensor(
omics_data.drop(["finalgold_visit"], axis=1).values
omics_data.drop(["phenotype"], axis=1).values
).to(device)
train_labels = {
"labels": torch.LongTensor(
omics_data["finalgold_visit"].values.copy()
).to(device),
"labels": torch.LongTensor(omics_data["phenotype"].values.copy()).to(
device
),
"omics_network": omics_network.to(device),
}
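`run_standard_training` derives the classifier's input width from the feature columns (everything except `phenotype`) and its output width from the number of distinct phenotype labels, then builds the training tensors the same way. A stand-alone sketch of that tensor preparation with a stand-in linear classifier, since the DPMON GNN model class itself is not shown in this diff (all data illustrative):

```python
import pandas as pd
import torch
import torch.nn as nn

omics_data = pd.DataFrame(
    {"gene_1": [0.1, 0.4, 0.3, 0.9],
     "gene_2": [1.2, 0.8, 0.5, 0.7],
     "phenotype": [0, 1, 0, 1]}
)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Dimensions come from the data, exactly as in the updated code above.
nn_input_dim = omics_data.drop(["phenotype"], axis=1).shape[1]   # feature count
nn_output_dim = omics_data["phenotype"].nunique()                # number of classes

# Stand-in classifier; DPMON builds its GNN-based model here instead.
model = nn.Linear(nn_input_dim, nn_output_dim).to(device)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

train_features = torch.FloatTensor(
    omics_data.drop(["phenotype"], axis=1).values
).to(device)
train_labels = torch.LongTensor(omics_data["phenotype"].values.copy()).to(device)

# One illustrative optimization step.
optimizer.zero_grad()
loss = criterion(model(train_features), train_labels)
loss.backward()
optimizer.step()
```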

)
predictions_df = pd.DataFrame(
{
"Actual": omics_data["finalgold_visit"].values,
"Actual": omics_data["phenotype"].values,
"Predicted": predicted.cpu().numpy(),
}
)
@@ -391,10 +391,10 @@ def tune_train_n(config):
gnn_hidden_dim=config["gnn_hidden_dim"],
gnn_layer_num=config["gnn_layer_num"],
ae_encoding_dim=1,
nn_input_dim=omics_data.drop(["finalgold_visit"], axis=1).shape[1],
nn_input_dim=omics_data.drop(["phenotype"], axis=1).shape[1],
nn_hidden_dim1=config["nn_hidden_dim1"],
nn_hidden_dim2=config["nn_hidden_dim2"],
nn_output_dim=omics_data["finalgold_visit"].nunique(),
nn_output_dim=omics_data["phenotype"].nunique(),
).to(device)

criterion = nn.CrossEntropyLoss()
)

train_features = torch.FloatTensor(
omics_data.drop(["finalgold_visit"], axis=1).values
omics_data.drop(["phenotype"], axis=1).values
).to(device)
train_labels = {
"labels": torch.LongTensor(
omics_data["finalgold_visit"].values.copy()
).to(device),
"labels": torch.LongTensor(omics_data["phenotype"].values.copy()).to(
device
),
"omics_network": omics_network_tg.to(device),
}
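`tune_train_n` builds the same model and tensors, but takes its hyperparameters from a config dictionary supplied by Ray Tune rather than from fixed arguments. A plain-Python sketch of exercising such a config-driven function over a small grid; the real search is driven by Ray Tune inside DPMON, and the parameter values here are illustrative:

```python
from itertools import product

def train_with_config(config):
    """Stand-in for tune_train_n: consume hyperparameters from a config dict."""
    # The real function builds the GNN model from these keys and reports a metric.
    print(
        f"gnn_hidden_dim={config['gnn_hidden_dim']}, "
        f"gnn_layer_num={config['gnn_layer_num']}, "
        f"nn_hidden_dim1={config['nn_hidden_dim1']}, "
        f"nn_hidden_dim2={config['nn_hidden_dim2']}"
    )

# Small illustrative grid; Ray Tune would sample and schedule these instead.
search_space = {
    "gnn_hidden_dim": [16, 32],
    "gnn_layer_num": [2, 5],
    "nn_hidden_dim1": [8],
    "nn_hidden_dim2": [8],
}

for values in product(*search_space.values()):
    train_with_config(dict(zip(search_space.keys(), values)))
```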

2 changes: 1 addition & 1 deletion bioneuralnet/network_embedding/gnn_embedding.py
@@ -29,7 +29,7 @@ def __init__(
omics_data: pd.DataFrame,
phenotype_data: pd.DataFrame,
clinical_data: pd.DataFrame,
phenotype_col: str = "finalgold_visit",
phenotype_col: str = "phenotype",
model_type: str = "GAT",
hidden_dim: int = 64,
layer_num: int = 2,