Refactoring Schnet and preparing datasets/models for PyTorch Lightning compatibility (#8)

* first outline of base NNP class
* reference Schnet implementation
* LightningModule
* training loop
* pad with 0 for embedding
* updating pooling for batches
* updating docstrings
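The "pad with 0 for embedding" and "updating pooling for batches" bullets describe the usual trick for batching molecules of different sizes: pad the atomic-number tensor with 0, reserve a zero row at index 0 of the embedding table, and pool per-atom terms by summing over the atom axis so padded positions contribute nothing. A minimal sketch of that idea (illustrative only, not the modelforge implementation):

```python
import torch
import torch.nn as nn

# padding_idx=0 keeps the embedding row for Z == 0 at zeros (and excluded
# from gradient updates), so padded atoms contribute nothing.
embedding = nn.Embedding(num_embeddings=100, embedding_dim=8, padding_idx=0)

# Two molecules padded to 3 atoms each: H2O (Z = 8, 1, 1) and H2 (Z = 1, 1, pad).
Z = torch.tensor([[8, 1, 1],
                  [1, 1, 0]])
features = embedding(Z)          # (2, 3, 8); rows where Z == 0 are all zeros

# Batch pooling: summing over the atom axis ignores padded atoms, because
# their features (and anything derived from them) stay at zero.
per_atom = features.sum(dim=-1)  # stand-in for a learned per-atom readout
E = per_atom.sum(dim=1)          # (2,): one energy per molecule
```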
Showing 16 changed files with 647 additions and 426 deletions.
In the conda environment file, the `ase` dependency is dropped:

```diff
@@ -21,7 +21,6 @@ dependencies:
   - torchvision
   - openff-units
   - pint
-  - ase

   # Testing
   - pytest
```
In the package `__init__`, `Dense` is dropped from the re-exported utilities:

```diff
@@ -1,2 +1,2 @@
 from .schnet import Schnet
-from .utils import Dense, GaussianRBF, cosine_cutoff, shifted_softplus, scatter_add
+from .utils import GaussianRBF, cosine_cutoff, shifted_softplus, scatter_add
```
The main change refactors `BaseNNP` from a plain `nn.Module` into a `lightning.LightningModule`: `calculate_energies_and_forces` becomes `calculate_energy`, `forward` takes a plain dictionary instead of the `Inputs` dataclass, the `dtype`/`device` constructor arguments go away, and `training_step` plus `configure_optimizers` are added:

```diff
@@ -1,67 +1,112 @@
-from typing import Dict, List, Optional
+from typing import Dict

+import lightning as pl
 import torch
 import torch.nn as nn
+from torch.optim import AdamW

-from modelforge.utils import Inputs, Properties, SpeciesEnergies
+from modelforge.utils import SpeciesEnergies


-class BaseNNP(nn.Module):
+class BaseNNP(pl.LightningModule):
     """
     Abstract base class for neural network potentials.
     This class defines the overall structure and ensures that subclasses
     implement the `calculate_energies_and_forces` method.
+
+    Methods
+    -------
+    forward(inputs: dict) -> SpeciesEnergies:
+        Forward pass for the neural network potential.
+    calculate_energy(inputs: dict) -> torch.Tensor:
+        Placeholder for the method that should calculate energies and forces.
+    training_step(batch, batch_idx) -> torch.Tensor:
+        Defines the train loop.
+    configure_optimizers() -> AdamW:
+        Configures the optimizer.
     """

-    def __init__(self, dtype: torch.dtype, device: torch.device):
+    def __init__(self):
         """
-        Initialize the NeuralNetworkPotential class.
-
-        Parameters
-        ----------
-        dtype : torch.dtype
-            Data type for the PyTorch tensors.
-        device : torch.device
-            Device ("cpu" or "cuda") on which computations will be performed.
+        Initialize the NNP class.
         """
         super().__init__()
-        self.dtype = dtype
-        self.device = device

-    def forward(
-        self,
-        inputs: Inputs,
-    ) -> SpeciesEnergies:
+    def forward(self, inputs: Dict[str, torch.Tensor]) -> SpeciesEnergies:
         """
         Forward pass for the neural network potential.

         Parameters
         ----------
-        inputs : Inputs
-            An instance of the Inputs data class containing atomic numbers, positions, etc.
+        inputs : dict
+            A dictionary containing atomic numbers, positions, etc.

         Returns
         -------
         SpeciesEnergies
             An instance of the SpeciesEnergies data class containing species and calculated energies.
         """
-
-        E = self.calculate_energies_and_forces(inputs)
-        return SpeciesEnergies(inputs.Z, E)
+        assert isinstance(inputs, Dict)
+        E = self.calculate_energy(inputs)
+        return SpeciesEnergies(inputs["Z"], E)

-    def calculate_energies_and_forces(self, inputs: Optional[Inputs] = None):
+    def calculate_energy(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor:
         """
         Placeholder for the method that should calculate energies and forces.
         This method should be implemented in subclasses.
+
+        Parameters
+        ----------
+        inputs : dict
+            A dictionary containing atomic numbers, positions, etc.
+
+        Returns
+        -------
+        torch.Tensor
+            The calculated energy tensor.
+
+        Raises
+        ------
+        NotImplementedError
+            If the method is not overridden in the subclass.
         """
         raise NotImplementedError("Subclasses must implement this method.")
+
+    def training_step(
+        self, batch: Dict[str, torch.Tensor], batch_idx: int
+    ) -> torch.Tensor:
+        """
+        Defines the training loop.
+
+        Parameters
+        ----------
+        batch : dict
+            Batch data.
+        batch_idx : int
+            Batch index.
+
+        Returns
+        -------
+        torch.Tensor
+            The loss tensor.
+        """
+        E_hat = self.forward(batch)  # wrap_vals_from_dataloader(batch))
+        loss = nn.functional.mse_loss(E_hat.energies, batch["E"])
+        # Logging to TensorBoard (if installed) by default
+        self.log("train_loss", loss)
+        return loss
+
+    def configure_optimizers(self) -> AdamW:
+        """
+        Configures the optimizer for training.
+
+        Returns
+        -------
+        AdamW
+            The AdamW optimizer.
+        """
+        optimizer = AdamW(self.parameters(), lr=1e-3)
+        return optimizer
```
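A hypothetical usage sketch of the refactored class: subclass `BaseNNP`, implement `calculate_energy`, and let Lightning drive the loop. The import path, toy model, and toy data below are assumptions for illustration, not code from this commit:

```python
import lightning as pl
import torch
from torch.utils.data import DataLoader

from modelforge.potential.models import BaseNNP  # hypothetical import path


class ToyNNP(BaseNNP):
    """Toy potential: one learned scalar per element, summed per molecule."""

    def __init__(self):
        super().__init__()
        # padding_idx=0 keeps zero-padded atoms at zero contribution
        self.atom_energy = torch.nn.Embedding(100, 1, padding_idx=0)

    def calculate_energy(self, inputs):
        # (n_molecules, n_atoms, 1) -> (n_molecules,)
        return self.atom_energy(inputs["Z"]).squeeze(-1).sum(dim=1)


# One toy batch with the keys `forward`/`training_step` expect ("Z", "E").
batch = {
    "Z": torch.tensor([[8, 1, 1], [1, 1, 0]]),  # H2O and zero-padded H2
    "E": torch.tensor([-76.4, -1.17]),          # made-up reference energies
}
loader = DataLoader([batch], batch_size=None)   # yields the dict as-is

trainer = pl.Trainer(max_steps=1, logger=False, enable_checkpointing=False)
trainer.fit(ToyNNP(), loader)
```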