Skip to content

Commit c878bef

Browse files
committed
Make consistent doc string for step method.
1 parent 3f850d6 commit c878bef

File tree

10 files changed

+10
-20
lines changed

10 files changed

+10
-20
lines changed

torch_optimizer/accsgd.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
5959
r"""Performs a single optimization step.
6060
6161
Arguments:
62-
closure: A closure that reevaluates the model
63-
and returns the loss.
62+
closure: A closure that reevaluates the model and returns the loss.
6463
"""
6564
loss = None
6665
if closure is not None:

torch_optimizer/adabound.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,8 +84,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
8484
r"""Performs a single optimization step.
8585
8686
Arguments:
87-
closure (callable, optional): A closure that reevaluates the model
88-
and returns the loss.
87+
closure: A closure that reevaluates the model and returns the loss.
8988
"""
9089
loss = None
9190
if closure is not None:

torch_optimizer/adamod.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6666
"""Performs a single optimization step.
6767
6868
Arguments:
69-
closure (callable, optional): A closure that reevaluates the model
70-
and returns the loss.
69+
closure: A closure that reevaluates the model and returns the loss.
7170
"""
7271
loss = None
7372
if closure is not None:

torch_optimizer/diffgrad.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6060
r"""Performs a single optimization step.
6161
6262
Arguments:
63-
closure (callable, optional): A closure that reevaluates the model
64-
and returns the loss.
63+
closure: A closure that reevaluates the model and returns the loss.
6564
"""
6665
loss = None
6766
if closure is not None:

torch_optimizer/lamb.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,8 +63,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6363
r"""Performs a single optimization step.
6464
6565
Arguments:
66-
closure (callable, optional): A closure that reevaluates the model
67-
and returns the loss.
66+
closure: A closure that reevaluates the model and returns the loss.
6867
"""
6968
loss = None
7069
if closure is not None:

torch_optimizer/lookahead.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,8 +63,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6363
r"""Performs a single optimization step.
6464
6565
Arguments:
66-
closure (callable, optional): A closure that reevaluates the model
67-
and returns the loss.
66+
closure: A closure that reevaluates the model and returns the loss.
6867
"""
6968
loss = self.optimizer.step(closure=closure)
7069
for group in self.param_groups:

torch_optimizer/novograd.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,8 +79,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
7979
r"""Performs a single optimization step.
8080
8181
Arguments:
82-
closure (callable, optional): A closure that reevaluates the model
83-
and returns the loss.
82+
closure: A closure that reevaluates the model and returns the loss.
8483
"""
8584
loss = None
8685
if closure is not None:

torch_optimizer/radam.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,8 +61,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6161
r"""Performs a single optimization step.
6262
6363
Arguments:
64-
closure (callable, optional): A closure that reevaluates the model
65-
and returns the loss.
64+
closure: A closure that reevaluates the model and returns the loss.
6665
"""
6766

6867
loss = None

torch_optimizer/sgdw.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,8 +69,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
6969
"""Performs a single optimization step.
7070
7171
Arguments:
72-
closure (callable, optional): A closure that reevaluates the model
73-
and returns the loss.
72+
closure: A closure that reevaluates the model and returns the loss.
7473
"""
7574
loss = None
7675
if closure is not None:

torch_optimizer/yogi.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,7 @@ def step(self, closure: OptLossClosure = None) -> OptFloat:
5959
r"""Performs a single optimization step.
6060
6161
Arguments:
62-
closure (callable, optional): A closure that reevaluates the model
63-
and returns the loss.
62+
closure: A closure that reevaluates the model and returns the loss.
6463
"""
6564
loss = None
6665
if closure is not None:

0 commit comments

Comments
 (0)