Update Lightning Examples #487

Merged: 7 commits, Dec 13, 2023
@@ -64,7 +64,7 @@
"outputs": [],
"source": [
"# Install some dependencies\n",
"!pip install pandas torch pytorch-lightning transformers==4.1.1 -q\n",
"!pip install pandas torch lightning transformers\n",
"!pip install -Uq wandb"
]
},
@@ -81,7 +81,7 @@
"import transformers\n",
"import numpy as np\n",
"import pandas as pd\n",
"import pytorch_lightning as pl"
"import lightning.pytorch as pl"
]
},
{
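The substantive change running through this whole PR is the move from the standalone `pytorch-lightning` package to the unified `lightning` package. A minimal sketch of the namespace migration (the `Trainer` call is illustrative):

```python
# Lightning >= 2.0 ships the same Trainer/LightningModule API under a
# unified package name. Old spelling: import pytorch_lightning as pl
import lightning.pytorch as pl

# Downstream code is unchanged once the import is swapped.
trainer = pl.Trainer(max_epochs=1)
```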
@@ -245,7 +245,7 @@
" \n",
" # Download the raw cola data from the 'zipfile' reference we added to the cola-raw artifact.\n",
" raw_data_artifact = run.use_artifact(\"cola-raw:latest\")\n",
" zip_path = raw_data_artifact.get_path(\"zipfile\").download()\n",
" zip_path = raw_data_artifact.get_entry(\"zipfile\").download()\n",
" !unzip -o $zip_path # jupyter hack to unzip data :P\n",
" \n",
" # Read in the raw data, log it to W&B as a wandb.Table\n",
@@ -298,7 +298,7 @@
"\n",
" # Download the preprocessed data\n",
" pp_data_artifact = run.use_artifact(\"preprocessed-data:latest\")\n",
" data_path = pp_data_artifact.get_path(\"dataset\").download()\n",
" data_path = pp_data_artifact.get_entry(\"dataset\").download()\n",
" dataset = torch.load(data_path)\n",
"\n",
" # Calculate the number of samples to include in each set.\n",
@@ -410,8 +410,8 @@
"\n",
" # Load the datasets from the split-dataset artifact\n",
" data = run.use_artifact(\"split-dataset:latest\")\n",
" train_dataset = torch.load(data.get_path(\"train-data\").download())\n",
" val_dataset = torch.load(data.get_path(\"validation-data\").download())\n",
" train_dataset = torch.load(data.get_entry(\"train-data\").download())\n",
" val_dataset = torch.load(data.get_entry(\"validation-data\").download())\n",
"\n",
" # Extract the config object associated with the run\n",
" config = run.config\n",
@@ -426,7 +426,7 @@
" gpus = -1 if torch.cuda.is_available() else 0\n",
" \n",
" # Construct a Trainer object with the W&B logger we created and epoch set by the config object\n",
" trainer = pl.Trainer(max_epochs=config.epochs, gpus=gpus, logger=logger)\n",
" trainer = pl.Trainer(max_epochs=config.epochs, logger=logger)\n",
" \n",
" # Build data loaders for our datasets, using the batch_size from our config object\n",
" train_data_loader = torch.utils.data.DataLoader(train_dataset, batch_size=config.batch_size)\n",
@@ -536,6 +536,18 @@
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
+},
+"language_info": {
+"codemirror_mode": {
+"name": "ipython",
+"version": 3
+},
+"file_extension": ".py",
+"mimetype": "text/x-python",
+"name": "python",
+"nbconvert_exporter": "python",
+"pygments_lexer": "ipython3",
+"version": "3.11.2"
}
},
"nbformat": 4,
@@ -27,7 +27,7 @@
"\n",
"# Image Classification using PyTorch Lightning ⚡️\n",
"\n",
"We will build an image classification pipeline using PyTorch Lightning. We will follow this [style guide](https://pytorch-lightning.readthedocs.io/en/stable/starter/style_guide.html) to increase the readability and reproducibility of our code. A cool explanation of this available [here](https://wandb.ai/wandb/wandb-lightning/reports/Image-Classification-using-PyTorch-Lightning--VmlldzoyODk1NzY)."
"We will build an image classification pipeline using PyTorch Lightning. We will follow this [style guide](https://lightning.ai/docs/pytorch/stable/starter/style_guide.html) to increase the readability and reproducibility of our code. A cool explanation of this available [here](https://wandb.ai/wandb/wandb-lightning/reports/Image-Classification-using-PyTorch-Lightning--VmlldzoyODk1NzY)."
]
},
{
@@ -46,7 +46,7 @@
"metadata": {},
"outputs": [],
"source": [
"!pip install pytorch-lightning -q\n",
"!pip install lightning torchvision -q\n",
"# install weights and biases\n",
"!pip install wandb -qU"
]
@@ -65,9 +65,9 @@
"metadata": {},
"outputs": [],
"source": [
"import pytorch_lightning as pl\n",
"import lightning.pytorch as pl\n",
"# your favorite machine learning tracking tool\n",
"from pytorch_lightning.loggers import WandbLogger\n",
"from lightning.pytorch.loggers import WandbLogger\n",
"\n",
"import torch\n",
"from torch import nn\n",
@@ -115,7 +115,7 @@
"- Apply transforms (rotate, tokenize, etc…).\n",
"- Wrap inside a DataLoader.\n",
"\n",
"Learn more about datamodules [here](https://pytorch-lightning.readthedocs.io/en/stable/extensions/datamodules.html). Let's build a datamodule for the Cifar-10 dataset. "
"Learn more about datamodules [here](https://lightning.ai/docs/pytorch/stable/data/datamodule.html). Let's build a datamodule for the Cifar-10 dataset. "
]
},
{
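As a companion to the datamodule checklist above, here is a minimal `LightningDataModule` sketch for CIFAR-10; the batch size and the 45k/5k split are illustrative choices, not values taken from the notebook:

```python
import lightning.pytorch as pl
from torch.utils.data import DataLoader, random_split
from torchvision import transforms
from torchvision.datasets import CIFAR10

class CIFAR10DataModule(pl.LightningDataModule):
    def __init__(self, data_dir="./data", batch_size=32):
        super().__init__()
        self.data_dir = data_dir
        self.batch_size = batch_size
        self.transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ])

    def prepare_data(self):
        # Download only; no state assignment here (runs once per node).
        CIFAR10(self.data_dir, train=True, download=True)
        CIFAR10(self.data_dir, train=False, download=True)

    def setup(self, stage=None):
        # Split train/val and build the test set.
        if stage in ("fit", "validate", None):
            full = CIFAR10(self.data_dir, train=True, transform=self.transform)
            self.train_set, self.val_set = random_split(full, [45000, 5000])
        if stage in ("test", None):
            self.test_set = CIFAR10(self.data_dir, train=False, transform=self.transform)

    def train_dataloader(self):
        return DataLoader(self.train_set, batch_size=self.batch_size, shuffle=True)

    def val_dataloader(self):
        return DataLoader(self.val_set, batch_size=self.batch_size)

    def test_dataloader(self):
        return DataLoader(self.test_set, batch_size=self.batch_size)
```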
@@ -168,8 +168,8 @@
"source": [
"## 📱 Callbacks\n",
"\n",
"A callback is a self-contained program that can be reused across projects. PyTorch Lightning comes with few [built-in callbacks](https://pytorch-lightning.readthedocs.io/en/latest/extensions/callbacks.html#built-in-callbacks) which are regularly used. \n",
"Learn more about callbacks in PyTorch Lightning [here](https://pytorch-lightning.readthedocs.io/en/latest/extensions/callbacks.html)."
"A callback is a self-contained program that can be reused across projects. PyTorch Lightning comes with few [built-in callbacks](https://lightning.ai/docs/pytorch/latest/extensions/callbacks.html#built-in-callbacks) which are regularly used. \n",
"Learn more about callbacks in PyTorch Lightning [here](https://lightning.ai/docs/pytorch/latest/extensions/callbacks.html)."
]
},
{
@@ -179,7 +179,7 @@
"source": [
"### Built-in Callbacks\n",
"\n",
"In this tutorial, we will use [Early Stopping](https://pytorch-lightning.readthedocs.io/en/latest/api/pytorch_lightning.callbacks.EarlyStopping.html#pytorch_lightning.callbacks.EarlyStopping) and [Model Checkpoint](https://pytorch-lightning.readthedocs.io/en/latest/api/pytorch_lightning.callbacks.ModelCheckpoint.html#pytorch_lightning.callbacks.ModelCheckpoint) built-in callbacks. They can be passed to the `Trainer`.\n"
"In this tutorial, we will use [Early Stopping](https://lightning.ai/docs/pytorch/latest/api/lightning.pytorch.callbacks.EarlyStopping.html#lightning.callbacks.EarlyStopping) and [Model Checkpoint](https://lightning.ai/docs/pytorch/latest/api/lightning.pytorch.callbacks.ModelCheckpoint.html#pytorch_lightning.callbacks.ModelCheckpoint) built-in callbacks. They can be passed to the `Trainer`.\n"
]
},
{
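A short sketch of wiring these two built-in callbacks into a `Trainer`; the monitored metric name `val_loss` and the patience value are assumptions for illustration:

```python
import lightning.pytorch as pl
from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint

early_stopping = EarlyStopping(monitor="val_loss", mode="min", patience=3)
checkpointing = ModelCheckpoint(monitor="val_loss", mode="min", save_top_k=1)

# Callbacks are passed as a list; the Trainer invokes their hooks for you.
trainer = pl.Trainer(max_epochs=10, callbacks=[early_stopping, checkpointing])
```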
@@ -437,7 +437,7 @@
"I hope you find this report helpful. I will encourage to play with the code and train an image classifier with a dataset of your choice. \n",
"\n",
"Here are some resources to learn more about PyTorch Lightning:\n",
"- [Step-by-step walk-through](https://pytorch-lightning.readthedocs.io/en/latest/starter/introduction.html) - This is one of the official tutorials. Their documentation is really well written and I highly encourage it as a good learning resource.\n",
"- [Step-by-step walk-through](https://lightning.ai/docs/pytorch/latest/starter/introduction.html) - This is one of the official tutorials. Their documentation is really well written and I highly encourage it as a good learning resource.\n",
"- [Use Pytorch Lightning with Weights & Biases](https://wandb.me/lightning) - This is a quick colab that you can run through to learn more about how to use W&B with PyTorch Lightning."
]
}
@@ -452,6 +452,18 @@
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
+},
+"language_info": {
+"codemirror_mode": {
+"name": "ipython",
+"version": 3
+},
+"file_extension": ".py",
+"mimetype": "text/x-python",
+"name": "python",
+"nbconvert_exporter": "python",
+"pygments_lexer": "ipython3",
+"version": "3.11.2"
}
},
"nbformat": 4,
@@ -28,8 +28,8 @@
"Coupled with the [Weights & Biases integration](https://docs.wandb.com/library/integrations/lightning), you can quickly train and monitor models for full traceability and reproducibility with only 2 extra lines of code:\n",
"\n",
"```python\n",
"from pytorch_lightning.loggers import WandbLogger\n",
"from pytorch_lightning import Trainer\n",
"from lightning.pytorch.loggers import WandbLogger\n",
"from lightning.pytorch import Trainer\n",
"\n",
"wandb_logger = WandbLogger()\n",
"trainer = Trainer(logger=wandb_logger)\n",
@@ -64,7 +64,7 @@
"metadata": {},
"outputs": [],
"source": [
"!pip install -q pytorch-lightning wandb"
"!pip install -q lightning wandb torchvision"
]
},
{
@@ -150,6 +150,15 @@
"* Call self.log in `training_step` and `validation_step` to log the metrics"
]
},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"import lightning.pytorch as pl"
+]
+},
{
"cell_type": "code",
"execution_count": null,
@@ -160,9 +169,8 @@
"from torch.nn import Linear, CrossEntropyLoss, functional as F\n",
"from torch.optim import Adam\n",
"from torchmetrics.functional import accuracy\n",
"from pytorch_lightning import LightningModule\n",
"\n",
"class MNIST_LitModule(LightningModule):\n",
"class MNIST_LitModule(pl.LightningModule):\n",
"\n",
" def __init__(self, n_classes=10, n_layer_1=128, n_layer_2=256, lr=1e-3):\n",
" '''method used to define our model parameters'''\n",
@@ -273,7 +281,7 @@
"metadata": {},
"outputs": [],
"source": [
"from pytorch_lightning.callbacks import ModelCheckpoint\n",
"from lightning.pytorch.callbacks import ModelCheckpoint\n",
"\n",
"checkpoint_callback = ModelCheckpoint(monitor='val_accuracy', mode='max')"
]
@@ -284,9 +292,9 @@
"source": [
"## 💡 Tracking Experiments with WandbLogger\n",
"\n",
"PyTorch Lightning has a `WandbLogger` to easily log your experiments with Wights & Biases. Just pass it to your `Trainer` to log to W&B. See the [WandbLogger docs](https://pytorch-lightning.readthedocs.io/en/stable/extensions/generated/pytorch_lightning.loggers.WandbLogger.html#pytorch_lightning.loggers.WandbLogger) for all parameters. Note, to log the metrics to a specific W&B Team, pass your Team name to the `entity` argument in `WandbLogger`\n",
"PyTorch Lightning has a `WandbLogger` to easily log your experiments with Wights & Biases. Just pass it to your `Trainer` to log to W&B. See the [WandbLogger docs](https://lightning.ai/docs/pytorch/stable/extensions/generated/pytorch_lightning.loggers.WandbLogger.html#pytorch_lightning.loggers.WandbLogger) for all parameters. Note, to log the metrics to a specific W&B Team, pass your Team name to the `entity` argument in `WandbLogger`\n",
"\n",
"#### `pytorch_lightning.loggers.WandbLogger()`\n",
"#### `lightning.pytorch.loggers.WandbLogger()`\n",
"\n",
"| Functionality | Argument/Function | PS |\n",
"| ------ | ------ | ------ |\n",
@@ -295,9 +303,9 @@
"| Organize runs by project | `WandbLogger(... ,project='my_project')` | |\n",
"| Log histograms of gradients and parameters | `WandbLogger.watch(model)` | `WandbLogger.watch(model, log='all')` to log parameter histograms |\n",
"| Log hyperparameters | Call `self.save_hyperparameters()` within `LightningModule.__init__()` |\n",
"| Log custom objects (images, audio, video, molecules…) | Use `WandbLogger.log_text`, `WandbLogger.log_image` and `WandbLogger.log_table` |\n",
"| Log custom objects (images, audio, video, molecules…) | Use `WandbLogger.log_text`, `WandbLogger.log_image` and `WandbLogger.log_table`, etc. |\n",
"\n",
"See the [WandbLogger docs](https://pytorch-lightning.readthedocs.io/en/stable/extensions/generated/pytorch_lightning.loggers.WandbLogger.html#pytorch_lightning.loggers.WandbLogger) here for all parameters. "
"See the [WandbLogger docs](https://lightning.ai/docs/pytorch/stable/extensions/generated/pytorch_lightning.loggers.WandbLogger.html#pytorch_lightning.loggers.WandbLogger) here for all parameters. "
]
},
{
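The table rows above map directly onto `WandbLogger` constructor arguments and methods; a compact sketch (project name is a placeholder):

```python
from lightning.pytorch import Trainer
from lightning.pytorch.loggers import WandbLogger

wandb_logger = WandbLogger(
    project="my_project",  # organize runs by project
    log_model="all",       # upload checkpoints as artifacts during training
)
trainer = Trainer(logger=wandb_logger)
# wandb_logger.watch(model, log="all")  # histograms of gradients and parameters
```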
@@ -306,8 +314,8 @@
"metadata": {},
"outputs": [],
"source": [
"from pytorch_lightning.loggers import WandbLogger\n",
"from pytorch_lightning import Trainer\n",
"from lightning.pytorch.loggers import WandbLogger\n",
"from lightning.pytorch import Trainer\n",
"\n",
"wandb_logger = WandbLogger(project='MNIST', # group runs in \"MNIST\" project\n",
" log_model='all') # log all new checkpoints during training"
@@ -334,12 +342,12 @@
"metadata": {},
"outputs": [],
"source": [
"from pytorch_lightning.callbacks import Callback\n",
"from lightning.pytorch.callbacks import Callback\n",
" \n",
"class LogPredictionsCallback(Callback):\n",
" \n",
" def on_validation_batch_end(\n",
" self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):\n",
" self, trainer, pl_module, outputs, batch, batch_idx):\n",
" \"\"\"Called when the validation batch ends.\"\"\"\n",
" \n",
" # `outputs` comes from `LightningModule.validation_step`\n",
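For context on this hunk: in Lightning 2.x, `on_validation_batch_end` receives `dataloader_idx` only as an optional trailing argument, so with a single validation dataloader the override can drop it, which is what the PR does here. A minimal sketch with a hypothetical callback name (the body is illustrative):

```python
from lightning.pytorch.callbacks import Callback

class LogFirstBatchCallback(Callback):
    def on_validation_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
        # Only act on the first validation batch of each epoch.
        if batch_idx == 0:
            x, y = batch
            print(f"val batch 0: {x.shape[0]} samples, outputs={outputs}")
```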
6 changes: 3 additions & 3 deletions colabs/pytorch-lightning/Profile_PyTorch_Code.ipynb
@@ -88,7 +88,7 @@
"metadata": {},
"outputs": [],
"source": [
"!pip install -q wandb pytorch_lightning torch_tb_profiler"
"!pip install -q wandb lightning torch_tb_profiler torchvision"
]
},
{
Expand All @@ -99,7 +99,7 @@
"source": [
"import glob\n",
"\n",
"import pytorch_lightning as pl\n",
"import lightning.pytorch as pl\n",
"import torch\n",
"import torch.nn as nn\n",
"import torch.nn.functional as F\n",
@@ -357,7 +357,7 @@
" with profiler:\n",
" profiler_callback = TorchTensorboardProfilerCallback(profiler)\n",
"\n",
" trainer = pl.Trainer(gpus=1, max_epochs=1, max_steps=total_steps,\n",
" trainer = pl.Trainer(max_epochs=1, max_steps=total_steps,\n",
" logger=pl.loggers.WandbLogger(log_model=True, save_code=True),\n",
" callbacks=[profiler_callback], precision=wandb.config.precision)\n",
"\n",