diff --git a/dockers/base-cuda/Dockerfile b/dockers/base-cuda/Dockerfile
index 852120a..1af14ab 100644
--- a/dockers/base-cuda/Dockerfile
+++ b/dockers/base-cuda/Dockerfile
@@ -89,7 +89,7 @@ RUN \
         # ... pytorch patch version
         # pip install torch==1.11.1+cu113 torchvision==0.11.3+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html; \
         # ... pytorch nightly dev version
-        pip install --pre torch==2.4.0.dev20240515  torchvision==0.19.0.dev20240515 -f https://download.pytorch.org/whl/nightly/cu121/torch_nightly.html; \
+        pip install --pre torch==2.4.0.dev20240601 torchvision==0.19.0.dev20240601 -f https://download.pytorch.org/whl/nightly/cu121/torch_nightly.html; \
         # ... test channel
         # pip install --pre torch torchvision -f https://download.pytorch.org/whl/test/cu121/torch_test.html; \
     fi && \
diff --git a/requirements/base.txt b/requirements/base.txt
index c7143dd..74919f0 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,5 +1,5 @@
 #lightning>=2.4.0,<2.4.1
 # the below is uncommented when master is targeting a specific pl dev master commit
-git+https://github.com/Lightning-AI/lightning.git@0f12271d7feeacb6fbe5d70d2ce057da4a04d8b4#egg=lightning
+git+https://github.com/Lightning-AI/lightning.git@a99a6d3af1e9b8090d892dfc24b4f616853a8a40#egg=lightning
 torch>=2.1.0
-mpmath<1.4.0  # temporary requirement to avoid installation of alpha version of mpmath
+#mpmath<1.4.0  # temporary requirement to avoid installation of alpha version of mpmath
diff --git a/requirements/standalone_base.txt b/requirements/standalone_base.txt
index b783425..8bd32f5 100644
--- a/requirements/standalone_base.txt
+++ b/requirements/standalone_base.txt
@@ -1,5 +1,5 @@
 #pytorch-lightning>=2.4.0,<2.4.1
 # the below is uncommented when master is targeting a specific pl dev master commit
-git+https://github.com/Lightning-AI/pytorch-lightning.git@0f12271d7feeacb6fbe5d70d2ce057da4a04d8b4#egg=pytorch-lightning
+git+https://github.com/Lightning-AI/pytorch-lightning.git@a99a6d3af1e9b8090d892dfc24b4f616853a8a40#egg=pytorch-lightning
 torch>=2.1.0
-mpmath<1.4.0  # temporary requirement to avoid installation of alpha version of mpmath
+#mpmath<1.4.0  # temporary requirement to avoid installation of alpha version of mpmath
diff --git a/setup.py b/setup.py
index 3830ac7..477e0c1 100755
--- a/setup.py
+++ b/setup.py
@@ -135,7 +135,7 @@ def _setup_args(standalone: bool = False) -> Dict[str, Any]:
         _INSTALL_PATHS["require"],
         file_name=base_reqs,
         standalone=standalone,
-        pl_commit="0f12271d7feeacb6fbe5d70d2ce057da4a04d8b4",
+        pl_commit="a99a6d3af1e9b8090d892dfc24b4f616853a8a40",
     )
     base_setup["install_requires"] = install_requires
     return base_setup
diff --git a/tests/test_fsdp.py b/tests/test_fsdp.py
index f6d27fe..68e4559 100644
--- a/tests/test_fsdp.py
+++ b/tests/test_fsdp.py
@@ -68,15 +68,15 @@
 
 additional_fsdp_warns = [
     "The number of training batches",  # minimizing cost of training for these tests
-    "does not support loading the optimizer",  # with PyTorch 1.x Lightning lacks OSD restoration support
     "Please use torch.distributed.all_gather_into_tensor",  # still required for PyTorch/Lightning <=2.1
     "Please use torch.distributed.reduce_scatter_tensor",  # still required for PyTorch/Lightning <=2.1
     "when logging on epoch level in distributed",  # validating FTS handling in this scenario
+    ".*torch.cpu.amp.autocast.*",  # temporarily req for 20240601 nightly, likey removable w/ PT 2.4 release
 ]
 EXPECTED_WARNS.extend(additional_fsdp_warns)
 FSDP_BASE_WARNS = EXPECTED_WARNS
 FSDP_DYNAMO_EXPECTED_WARNS = [
-    "Final phase max_transition_epoch",  # still required for PyTorch/Lightning <=2.0
+    "Final phase max_transition_epoch",  # still required for PyTorch/Lightning <=2.4
 ]
 
 ##########################