diff --git a/docs/source-app/code_samples/quickstart/app/app_1.py b/docs/source-app/code_samples/quickstart/app/app_1.py
index ac41c5ef83fa1..29d8db245a170 100644
--- a/docs/source-app/code_samples/quickstart/app/app_1.py
+++ b/docs/source-app/code_samples/quickstart/app/app_1.py
@@ -88,5 +88,5 @@ def run(self):
 # Step 4: download a dataset to your local directory under `/data`
 download_data("https://pl-flash-data.s3.amazonaws.com/hymenoptera_data.zip", "./data")
 
-# Initalize your Lightning app with 5 epochs
+# Initialize your Lightning app with 5 epochs
 app = L.LightningApp(RootFlow(5, "./data/hymenoptera_data"))
diff --git a/docs/source-app/code_samples/quickstart/hello_world/app.py b/docs/source-app/code_samples/quickstart/hello_world/app.py
index 7514d9085604b..07a764cb98538 100644
--- a/docs/source-app/code_samples/quickstart/hello_world/app.py
+++ b/docs/source-app/code_samples/quickstart/hello_world/app.py
@@ -12,5 +12,5 @@ def run(self):
        print("Hello World!")


-# Step 3: Initalize a LightningApp with the LightningFlow you defined (in step 1)
+# Step 3: Initialize a LightningApp with the LightningFlow you defined (in step 1)
 app = L.LightningApp(HelloWorld())
diff --git a/docs/source-app/get_started/go_beyond_training_content.rst b/docs/source-app/get_started/go_beyond_training_content.rst
index a471e91d85a9c..c8baeb8fab77b 100644
--- a/docs/source-app/get_started/go_beyond_training_content.rst
+++ b/docs/source-app/get_started/go_beyond_training_content.rst
@@ -308,7 +308,7 @@ Implement the ``configure_layout`` method to connect them together:
 5: Init the ``app`` object
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Initalize an ``app`` object with the ``TrainDeploy`` component (this won't run the App yet):
+Initialize an ``app`` object with the ``TrainDeploy`` component (this won't run the App yet):
 
 .. code:: python
    :emphasize-lines: 29
diff --git a/docs/source-pytorch/common/checkpointing_basic.rst b/docs/source-pytorch/common/checkpointing_basic.rst
index 6ff54c94245d2..8a4834096c44d 100644
--- a/docs/source-pytorch/common/checkpointing_basic.rst
+++ b/docs/source-pytorch/common/checkpointing_basic.rst
@@ -106,8 +106,8 @@ The LightningModule also has access to the Hyperparameters
 
 ----
 
-Initalize with other parameters
-===============================
+Initialize with other parameters
+================================
 If you used the *self.save_hyperparameters()* method in the init of the LightningModule, you can initialize the model with different hyperparameters.
 
 .. code-block:: python
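
Context for the last hunk: the retitled section describes how a LightningModule that calls ``self.save_hyperparameters()`` in its ``__init__`` can be re-created from a checkpoint with different hyperparameters. A minimal sketch of that behavior follows (not part of the patch); ``LitModel``, its parameters, and the checkpoint path are hypothetical names used only for illustration.

import torch
from torch import nn
import lightning.pytorch as pl


class LitModel(pl.LightningModule):
    # Hypothetical module: names and defaults are illustrative only.
    def __init__(self, in_dim: int = 32, learning_rate: float = 1e-3):
        super().__init__()
        # Stores in_dim and learning_rate in the checkpoint under "hyper_parameters".
        self.save_hyperparameters()
        self.layer = nn.Linear(self.hparams.in_dim, 10)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)


# Placeholder path for a checkpoint produced by training LitModel.
PATH = "path/to/checkpoint.ckpt"

# Re-creates the model with the hyperparameters stored in the checkpoint.
model = LitModel.load_from_checkpoint(PATH)

# Keyword arguments override the stored values, e.g. a different learning rate.
model = LitModel.load_from_checkpoint(PATH, learning_rate=1e-4)

Overriding a hyperparameter this way only changes how the module is constructed; the weights are still loaded from the checkpoint, so overridden values should not alter parameter shapes.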