[Easy] Unused var in functional_adam (#88292)

Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/88292
Approved by: https://github.com/awgu
diff --git a/torch/distributed/optim/functional_adam.py b/torch/distributed/optim/functional_adam.py
index 72001f1..92b749a 100644
--- a/torch/distributed/optim/functional_adam.py
+++ b/torch/distributed/optim/functional_adam.py
@@ -66,7 +66,6 @@
         Similar to step, but operates on a single parameter and optionally a
         gradient tensor.
         """
-        params = [param]
         params_with_grad = []
         grads = []
         exp_avgs = []
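
For context, a minimal sketch of how step_param is typically driven on a single parameter (assumptions: the _FunctionalAdam constructor takes a list of parameters plus the usual Adam hyperparameters, and step_param(param, grad) applies one update to that tensor; check the current source for the exact signatures):

    import torch
    from torch.distributed.optim.functional_adam import _FunctionalAdam

    # One parameter and an explicit gradient; the functional optimizer is
    # driven per-parameter rather than over parameter groups.
    param = torch.nn.Parameter(torch.randn(4))
    grad = torch.randn_like(param)

    # Hypothetical usage; hyperparameters beyond lr are left at their defaults.
    opt = _FunctionalAdam([param], lr=1e-3)

    # step_param collects its own params_with_grad list from the (param, grad)
    # pair, so the removed `params = [param]` local was never read afterwards.
    opt.step_param(param, grad)

The deletion is behavior-preserving: the list was assigned at the top of step_param but not used by the rest of the method, which operates on params_with_grad instead.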