
Commit

Spellings (#742)

Summary:
Pull Request resolved: #742

TSIA

Reviewed By: manav-a

Differential Revision: D47722348

fbshipit-source-id: d98991229a3677276726448ab86e980f33e84bbc
kunalb authored Jul 26, 2023
1 parent d452a8c commit a17e427
Showing 3 changed files with 6 additions and 6 deletions.
4 changes: 2 additions & 2 deletions torchx/schedulers/api.py
@@ -97,7 +97,7 @@ def __hash__(self) -> int:
 class Scheduler(abc.ABC, Generic[T]):
     """
     An interface abstracting functionalities of a scheduler.
-    Implementors need only implement those methods annotated with
+    Implementers need only implement those methods annotated with
     ``@abc.abstractmethod``.
     """

@@ -148,7 +148,7 @@ def submit(
     def schedule(self, dryrun_info: AppDryRunInfo) -> str:
         """
         Same as ``submit`` except that it takes an ``AppDryRunInfo``.
-        Implementors are encouraged to implement this method rather than
+        Implementers are encouraged to implement this method rather than
         directly implementing ``submit`` since ``submit`` can be trivially
         implemented by:
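
The docstring in the second hunk is truncated by the diff context, but the relationship it describes is simple: once ``submit_dryrun`` and ``schedule`` exist, ``submit`` reduces to chaining them. The sketch below only illustrates that idea and is not the upstream implementation; the ``cfg`` parameter type and the exact public signatures are assumptions here.

from torchx.schedulers.api import Scheduler
from torchx.specs import AppDef, AppDryRunInfo

# Illustrative sketch (assumed signatures, not torchx's actual code): ``submit``
# expressed as ``submit_dryrun`` followed by ``schedule``, as the docstring hints.
def submit_via_dryrun(scheduler: Scheduler, app: AppDef, cfg) -> str:
    dryrun_info: AppDryRunInfo = scheduler.submit_dryrun(app, cfg)  # build the dry-run request
    return scheduler.schedule(dryrun_info)  # hand the prepared request to the backend
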
6 changes: 3 additions & 3 deletions torchx/schedulers/local_scheduler.py
@@ -99,7 +99,7 @@ class ReplicaParam:

 class ImageProvider(abc.ABC):
     """
-    Manages downloading and setting up an on localhost. This is only needed for
+    Manages downloading and setting up an image on localhost. This is only needed for
     ``LocalhostScheduler`` since typically real schedulers will do this
     on-behalf of the user.
     """
@@ -766,7 +766,7 @@ def _submit_dryrun(
     def _cuda_device_count(self) -> int:
         # this method deliberately does not use ``torch.cuda.device_count()``
         # to avoid taking a dependency on pytorch
-        # this make sit possible to avoid a BUCK dependency (internally at Meta)
+        # this makes it possible to avoid a BUCK dependency (internally at Meta)
         # on //caffe2:torch which slows down builds of //torchx:* rules
         gpu_cmd = "nvidia-smi -L"
         try:
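
The comment in this hunk explains why the GPU count comes from ``nvidia-smi`` rather than ``torch.cuda.device_count()``. As a rough standalone sketch of that approach (not the file's exact implementation), counting the device lines printed by ``nvidia-smi -L`` looks like:

import subprocess

# Rough sketch: ``nvidia-smi -L`` prints one line per device ("GPU 0: ...",
# "GPU 1: ..."), so counting those lines yields the device count without
# importing torch.
def cuda_device_count() -> int:
    try:
        out = subprocess.run(
            ["nvidia-smi", "-L"], capture_output=True, text=True, check=True
        ).stdout
    except (FileNotFoundError, subprocess.CalledProcessError):
        return 0  # no driver installed or no GPUs visible
    return sum(1 for line in out.splitlines() if line.startswith("GPU"))
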
@@ -832,7 +832,7 @@ def auto_set_CUDA_VISIBLE_DEVICES(
 """\n
 ======================================================================
 Running multiple role replicas that require GPUs without
-setting `CUDA_VISIBLE_DEVICES` may result in multiple
+setting `CUDA_VISIBLE_DEVICES` may result in multiple
 processes using the same GPU device with undesired consequences
 such as CUDA OutOfMemory errors.
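
The warning text covers the case where several local replicas each see every GPU. A common remedy, shown below only as a sketch of the general idea (not the scheduler's actual assignment logic), is to hand each replica a disjoint slice of device ids via ``CUDA_VISIBLE_DEVICES``:

import os

# Illustrative only: give each replica a contiguous, disjoint slice of device
# ids so that concurrent local replicas never share a GPU.
def cuda_visible_devices_for(replica_id: int, gpus_per_replica: int) -> str:
    start = replica_id * gpus_per_replica
    return ",".join(str(i) for i in range(start, start + gpus_per_replica))

# e.g. replica 1 with 2 GPUs per replica -> "2,3"
os.environ["CUDA_VISIBLE_DEVICES"] = cuda_visible_devices_for(1, 2)
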
2 changes: 1 addition & 1 deletion torchx/specs/api.py
@@ -366,7 +366,7 @@ class AppDef:
     Args:
         name: Name of application
         roles: List of roles
-        metadata: metadata to the app (treament of metadata is scheduler dependent)
+        metadata: metadata to the app (treatment of metadata is scheduler dependent)
     """

     name: str
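
As a usage note on the corrected docstring: ``metadata`` is a free-form mapping attached to the app, and whether a scheduler honors it is entirely up to that scheduler. The example below is a hedged sketch; the ``Role`` keyword arguments shown are assumed and may differ slightly across torchx versions.

from torchx.specs import AppDef, Role

# Sketch of attaching scheduler-dependent metadata to an application definition.
app = AppDef(
    name="trainer",
    roles=[
        Role(
            name="worker",
            image="/tmp/my_app",   # meaning of "image" is scheduler dependent too
            entrypoint="main.py",
            num_replicas=2,
        )
    ],
    metadata={"team": "ml-infra"},  # treatment of metadata is scheduler dependent
)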
