# -----------------------
#
# Our goal is to optimize the PyTorch Lightning training job defined in
- # `mnist_train_nas.py <https://github.com/pytorch/tutorials/tree/master/beginner_source/mnist_train_nas.py>`__.
+ # `mnist_train_nas.py <https://github.com/pytorch/tutorials/tree/master/intermediate_source/mnist_train_nas.py>`__.
# To do this using TorchX, we write a helper function that takes in
# the values of the architecture and hyperparameters of the training
# job and creates a `TorchX AppDef <https://pytorch.org/torchx/latest/basics.html>`__
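The hunk below patches the ``trainer()`` helper itself. For context, such a helper, wrapping the training script in a TorchX ``AppDef`` via ``torchx.components.utils.python``, might look roughly like this; it is a minimal sketch, not the exact function from the patched file, and the parameter list and the ``mnist_train_nas.py`` command-line flags are assumptions::

    from torchx import specs
    from torchx.components import utils

    def trainer(
        log_path: str,
        hidden_size_1: int,
        learning_rate: float,
        epochs: int,
        trial_idx: int = -1,
    ) -> specs.AppDef:
        # Give every trial its own log subdirectory so its metrics can be
        # read back individually later on.
        if trial_idx >= 0:
            log_path = f"{log_path}/{trial_idx}"
        # utils.python builds an AppDef that runs the given script with the
        # supplied command-line arguments.
        return utils.python(
            "--log_path", log_path,
            "--hidden_size_1", str(hidden_size_1),
            "--learning_rate", str(learning_rate),
            "--epochs", str(epochs),
            script="mnist_train_nas.py",
            name="trainer",
        )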
@@ -103,7 +103,7 @@ def trainer(
# Setting up the Runner
# ---------------------
#
- # Ax’s `Runner <https://ax.dev/api/core.html#module-ax.core.runner>`__
+ # Ax’s `Runner <https://ax.dev/api/core.html#ax.core.runner.Runner>`__
# abstraction allows writing interfaces to various backends.
# Ax already comes with a Runner for TorchX, so we just need to
# configure it. For the purpose of this tutorial we run jobs locally
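As a sketch of that configuration, assuming a purely local run (the ``local_cwd`` scheduler, the temporary log directory, and the ``trainer`` component from the previous sketch are illustrative choices rather than the tutorial's exact settings)::

    import tempfile

    from ax.runners.torchx import TorchXRunner

    # Each trial writes its Tensorboard logs under this directory.
    log_dir = tempfile.mkdtemp()

    ax_runner = TorchXRunner(
        tracker_base="/tmp/",
        component=trainer,  # the AppDef-building helper sketched above
        # log_path is passed to every trial in addition to the
        # parameters that Ax chooses for it.
        component_const_params={"log_path": log_dir},
        scheduler="local_cwd",  # run on the local machine; swap in a
                                # cluster scheduler to scale out
        cfg={},
    )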
@@ -228,7 +228,7 @@ def trainer(
# fashion locally and write the results to the ``log_dir`` based on the trial
# index (see the ``trainer()`` function above). We will define a metric
# class that is aware of that logging directory. By subclassing
- # `TensorboardCurveMetric <https://ax.dev/tutorials/multiobjective_optimization.html>`__
+ # `TensorboardCurveMetric <https://ax.dev/api/metrics.html?highlight=tensorboardcurvemetric#ax.metrics.tensorboard.TensorboardCurveMetric>`__
# we get the logic to read and parse the Tensorboard logs for free.
#
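A minimal sketch of such a subclass is shown below. It assumes an Ax version in which ``TensorboardCurveMetric`` exposes the ``get_ids_from_trials`` and ``is_available_while_running`` hooks, and that trials log to ``<log_dir>/<trial index>`` as in the earlier sketches::

    from pathlib import Path

    from ax.metrics.tensorboard import TensorboardCurveMetric

    class MyTensorboardMetric(TensorboardCurveMetric):

        # Tell the metric where to find the Tensorboard logs for each trial:
        # the trainer above writes them to ``<log_dir>/<trial index>``.
        @classmethod
        def get_ids_from_trials(cls, trials):
            return {
                trial.index: Path(log_dir).joinpath(str(trial.index)).as_posix()
                for trial in trials
            }

        # The metric is only read once a trial has finished running, so it
        # cannot be used for trial-level early stopping here.
        @classmethod
        def is_available_while_running(cls):
            return False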
@@ -314,7 +314,7 @@ def is_available_while_running(cls):
# Creating the Ax Experiment
# --------------------------
#
- # In Ax, the `Experiment <https://ax.dev/api/core.html#module-ax.core.experiment>`__
+ # In Ax, the `Experiment <https://ax.dev/api/core.html#ax.core.experiment.Experiment>`__
# object stores all the information about the problem
# setup.
#
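To illustrate, a pared-down, single-objective version of such a setup could look like the following; the parameter ranges, the ``val_acc`` curve name, and the experiment name are placeholders rather than the tutorial's actual multi-objective configuration::

    from ax.core import (
        Experiment,
        Objective,
        OptimizationConfig,
        ParameterType,
        RangeParameter,
        SearchSpace,
    )

    # The parameters the optimizer is allowed to tune.
    search_space = SearchSpace(
        parameters=[
            RangeParameter(
                name="hidden_size_1",
                parameter_type=ParameterType.INT,
                lower=16,
                upper=128,
                log_scale=True,
            ),
            RangeParameter(
                name="learning_rate",
                parameter_type=ParameterType.FLOAT,
                lower=1e-4,
                upper=1e-1,
                log_scale=True,
            ),
        ]
    )

    # What we optimize: maximize validation accuracy as reported by the
    # Tensorboard metric class defined above.
    optimization_config = OptimizationConfig(
        objective=Objective(
            metric=MyTensorboardMetric(
                name="val_acc", curve_name="val_acc", lower_is_better=False
            ),
            minimize=False,
        )
    )

    experiment = Experiment(
        name="torchx_mnist_nas",
        search_space=search_space,
        optimization_config=optimization_config,
        runner=ax_runner,  # the TorchX runner configured earlier
    )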
@@ -338,7 +338,7 @@ def is_available_while_running(cls):
# Choosing the GenerationStrategy
# -------------------------------
#
- # A `GenerationStrategy <https://ax.dev/api/modelbridge.html#module-ax.modelbridge.generation_strategy>`__
+ # A `GenerationStrategy <https://ax.dev/api/modelbridge.html#ax.modelbridge.generation_strategy.GenerationStrategy>`__
# is the abstract representation of how we would like to perform the
# optimization. While this can be customized (if you’d like to do so, see
# `this tutorial <https://ax.dev/tutorials/generation_strategy.html>`__),
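For a rough idea of what a hand-specified strategy looks like, here is a minimal sketch that seeds the optimization with quasi-random Sobol trials and then switches to Bayesian optimization; the step sizes and model choices are illustrative, and the tutorial may instead let Ax pick a strategy automatically::

    from ax.modelbridge.generation_strategy import (
        GenerationStep,
        GenerationStrategy,
    )
    from ax.modelbridge.registry import Models

    gs = GenerationStrategy(
        steps=[
            # A handful of quasi-random Sobol trials to seed the surrogate.
            GenerationStep(model=Models.SOBOL, num_trials=5),
            # Bayesian optimization for all remaining trials
            # (num_trials=-1 means "until the trial budget is exhausted").
            GenerationStep(model=Models.GPEI, num_trials=-1),
        ]
    )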