diff --git a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
index 36ad28477d..b081644216 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
@@ -103,7 +103,7 @@ Before you start, make sure:
 === "API"
 
     Copy the following command to your terminal. Make sure to update the below parameters according to the comments. For more details, see [Workspaces API:](https://api-docs.run.ai/latest/tag/Workspaces)
-    ```sh
+    ```bash
     curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \ # <COMPANY-URL> is the link to the Run:ai user interface.
     -H 'Content-Type: application/json' \
     -H 'Authorization: Bearer <TOKEN>' \ # <TOKEN> is the API access token obtained in Step 1.
@@ -114,21 +114,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1
-                "gpuMemoryRequest": "4G",
-                "gpuMemoryLimit": "12G",
-                "largeShmRequest": true
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuMemoryRequest": "4G",
+                "gpuMemoryLimit": "12G",
+                "largeShmRequest": true
+            }
         }
     }'
     ```
@@ -172,7 +172,7 @@ Before you start, make sure:
 === "API"
 
     Copy the following command to your terminal. Make sure to update the below parameters according to the comments. For more details, see [Workspaces API](https://api-docs.run.ai/latest/tag/Workspaces):
-    ```sh
+    ```bash
     curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \ # <COMPANY-URL> is the link to the Run:ai user interface.
     -H 'Content-Type: application/json' \
     -H 'Authorization: Bearer <TOKEN>' \ # <TOKEN> is the API access token obtained in Step 1.
@@ -183,21 +182,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1
-                "gpuMemoryRequest": "4G",
-                "gpuMemoryLimit": "12G",
-                "largeShmRequest": true
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuMemoryRequest": "4G",
+                "gpuMemoryLimit": "12G",
+                "largeShmRequest": true
+            }
         }
     }'
     ```
diff --git a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
index 63e89f72c2..c583c734cb 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
@@ -139,20 +139,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "jupyter/base-notebook",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1
-                "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuRequestType": "portion",
+                "gpuPortionRequest": 0.1
+
+            }
         }
     }'
     ```
diff --git a/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md b/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
index dd68a3f74e..76effb05b5 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
@@ -103,9 +103,9 @@ Before you start, make sure:
                 }
             ],
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
+                "gpuPortionRequest": 0.1,
                 "gpuPortionLimit": 1,
                 "cpuCoreRequest":0.2,
                 "cpuMemoryRequest": "200M",
@@ -172,9 +172,9 @@ Before you start, make sure:
                 }
             ],
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
+                "gpuPortionRequest": 0.1,
                 "gpuPortionLimit": 1,
                 "cpuCoreRequest":0.2,
                 "cpuMemoryRequest": "200M",
diff --git a/docs/Researcher/workloads/workspaces/quickstart-jupyter.md b/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
index 953dc25ae6..0e39d405da 100644
--- a/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
+++ b/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
@@ -40,7 +40,7 @@ Before you start, make sure:
 
 ## Step 2: Submitting a workspace
 
-=== "UI
+=== "UI"
     1. Go to the Workload manager → Workloads
     2. Select __+NEW WORKLOAD__ and then __Workspace__
    3. Select under which __cluster__ to create the workload
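
Note: in the first two files the request body is split across two hunks, so the full payload is hard to read from the diff alone. The sketch below assembles the dynamic GPU fractions call as it reads after this change. It is illustrative only: the name, projectId, and clusterId fields and the <COMPANY-URL>, <TOKEN>, <PROJECT-ID>, and <CLUSTER-UUID> placeholders are assumed from the unchanged parts of the page rather than asserted by this diff, and the inline `\ # ...` comments are dropped so the command can be pasted as-is.

```bash
# Sketch only: assembled from the updated hunks for launching-workloads-with-dynamic-gpu-fractions.md.
# Replace <COMPANY-URL>, <TOKEN>, <PROJECT-ID> and <CLUSTER-UUID> with your own values;
# the name/projectId/clusterId fields are assumed context and are not part of this diff.
curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \
  -H 'Content-Type: application/json' \
  -H 'Authorization: Bearer <TOKEN>' \
  -d '{
    "name": "workload-name",
    "projectId": "<PROJECT-ID>",
    "clusterId": "<CLUSTER-UUID>",
    "spec": {
      "command": "start-notebook.sh",
      "args": "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
      "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
      "exposedUrls": [
        {
          "container": 8888,
          "toolType": "jupyter-notebook",
          "toolName": "Jupyter"
        }
      ],
      "compute": {
        "gpuDevicesRequest": 1,
        "gpuMemoryRequest": "4G",
        "gpuMemoryLimit": "12G",
        "largeShmRequest": true
      }
    }
  }'
```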