Skip to content

Commit 49fa533

Browse files
Abhishek9639 and Abhishek authored
[DOC] add missing torch entries in README and documentation (#239)
## Description Added the missing `torch` integration entries in the README feature table and created a full example page for TorchExperiment, following the pattern of existing sklearn/sktime integrations. ## Related Issues Fixes #211 ## Type of Change - [x] `[DOC]` - Documentation changes ## How was this solved? - Added `torch` entry to the integrations table in examples/integrations/README.md - Created torch_experiment_example.py with a working example using TorchExperiment+ `HillClimbing` optimizer - Added a "PyTorch Lightning Integration" section in the Sphinx docs (docs/source/examples/integrations.rst) - Updated the README feature table's third column to "Problems & Experiments" as suggested in the issue ## Checklist - [x] PR title includes appropriate tag: `[BUG]`, `[ENH]`, `[DOC]` or `[MNT]` - [x] Linked to related issue (if applicable) - [x] Code passes `make check` (lint, format, isort) - [x] Tests added/updated for changes (if applicable) - [x] Documentation updated (if applicable) ## Testing The new example is auto-discovered by examples/test_examples.py and passes. --------- Co-authored-by: Abhishek <abhishekup082gmail.com@Abhisheks-MacBook-Air.local>
1 parent 5e7ece8 commit 49fa533

2 files changed

Lines changed: 136 additions & 0 deletions

File tree

docs/source/examples/integrations.rst

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,31 @@ For time series forecasting and classification with sktime:
3333
pip install hyperactive[sktime-integration]
3434
3535
36+
PyTorch Lightning Integration
-----------------------------

For deep learning hyperparameter optimization with PyTorch Lightning:

.. list-table::
   :header-rows: 1
   :widths: 30 70

   * - Use Case
     - Example
   * - PyTorch Lightning Experiment
     - `torch_experiment_example.py <https://github.com/SimonBlanke/Hyperactive/blob/master/examples/integrations/torch_experiment_example.py>`_

.. note::

   PyTorch Lightning integration requires additional dependencies:

   .. code-block:: bash

      pip install hyperactive[all_extras]

   or install ``torch`` and ``lightning`` separately.
3661
Installing Extras
3762
-----------------
3863

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
"""Example: Hyperparameter tuning with TorchExperiment and PyTorch Lightning.
2+
3+
This example demonstrates how to use the TorchExperiment class to
4+
optimize hyperparameters of a PyTorch Lightning model using Hyperactive.
5+
"""
6+
7+
import lightning as L
8+
import numpy as np
9+
import torch
10+
from torch import nn
11+
from torch.utils.data import DataLoader
12+
13+
from hyperactive.experiment.integrations import TorchExperiment
14+
from hyperactive.opt.gfo import HillClimbing
15+
16+
17+
# 1. Define a Lightning Module
18+
class SimpleLightningModule(L.LightningModule):
    """Minimal two-class classifier used to demonstrate hyperparameter tuning.

    Parameters
    ----------
    input_dim : int, default=10
        Number of input features per sample.
    hidden_dim : int, default=16
        Width of the single hidden layer (tuned by the optimizer).
    lr : float, default=1e-3
        Learning rate for the Adam optimizer (tuned by the optimizer).
    """

    def __init__(self, input_dim=10, hidden_dim=16, lr=1e-3):
        super().__init__()
        # Record constructor arguments in self.hparams and checkpoints.
        self.save_hyperparameters()
        layers = [
            nn.Linear(input_dim, hidden_dim),
            nn.ReLU(),
            # Two output logits -> binary classification via cross-entropy.
            nn.Linear(hidden_dim, 2),
        ]
        self.model = nn.Sequential(*layers)
        self.lr = lr

    def forward(self, x):
        """Compute class logits for a batch of inputs."""
        return self.model(x)

    def training_step(self, batch, batch_idx):
        """Run one training step and log the batch loss."""
        features, targets = batch
        logits = self(features)
        loss = nn.functional.cross_entropy(logits, targets)
        self.log("train_loss", loss)
        return loss

    def validation_step(self, batch, batch_idx):
        """Compute and log the validation loss for one batch."""
        features, targets = batch
        logits = self(features)
        # on_epoch=True aggregates per-batch losses into an epoch-level metric,
        # which the experiment reads as the optimization objective.
        val_loss = nn.functional.cross_entropy(logits, targets)
        self.log("val_loss", val_loss, on_epoch=True)
        return val_loss

    def configure_optimizers(self):
        """Create the Adam optimizer with the configured learning rate."""
        return torch.optim.Adam(self.parameters(), lr=self.lr)
54+
55+
56+
# 2. Define a DataModule
57+
class RandomDataModule(L.LightningDataModule):
    """Synthetic-data LightningDataModule for the example.

    Generates 200 random samples (10 features, binary labels) and splits
    them 160/40 into training and validation subsets.

    Parameters
    ----------
    batch_size : int, default=32
        Batch size used by both dataloaders.
    """

    def __init__(self, batch_size=32):
        super().__init__()
        self.batch_size = batch_size

    def setup(self, stage=None):
        """Create the random dataset and the train/validation split."""
        features = torch.randn(200, 10)
        labels = torch.randint(0, 2, (200,))
        dataset = torch.utils.data.TensorDataset(features, labels)
        # 160 samples for training, 40 held out for validation.
        self.train, self.val = torch.utils.data.random_split(dataset, [160, 40])

    def train_dataloader(self):
        """Return the dataloader over the training split."""
        return DataLoader(self.train, batch_size=self.batch_size)

    def val_dataloader(self):
        """Return the dataloader over the validation split."""
        return DataLoader(self.val, batch_size=self.batch_size)
79+
80+
81+
# 3. Create the TorchExperiment
data = RandomDataModule(batch_size=16)
data.setup()

# Keep the trainer quiet and fast: 3 epochs, no progress bar/summary/logger.
trainer_config = {
    "max_epochs": 3,
    "enable_progress_bar": False,
    "enable_model_summary": False,
    "logger": False,
}

experiment = TorchExperiment(
    datamodule=data,
    lightning_module=SimpleLightningModule,
    trainer_kwargs=trainer_config,
    objective_metric="val_loss",
)

# 4. Define search space and optimizer
search_space = {
    # Hidden-layer widths to try.
    "hidden_dim": [16, 32, 64, 128],
    # Ten learning rates, log-spaced from 1e-4 to 1e-1.
    "lr": np.logspace(-4, -1, 10).tolist(),
}

hill_climbing = HillClimbing(
    search_space=search_space,
    n_iter=5,
    experiment=experiment,
)

# 5. Run optimization
best_params = hill_climbing.solve()
print(f"Best params: {best_params}")

0 commit comments

Comments
 (0)