From c2e67f6a2554bb19da38d8d237b8efd71ae4b9ce Mon Sep 17 00:00:00 2001
From: Sherin
Date: Thu, 27 Feb 2025 14:35:16 +0200
Subject: [PATCH 1/2] Updates

---
 .../launching-workloads-with-dynamic-gpu-fractions.md    | 4 ++--
 .../scheduling/launching-workloads-with-gpu-fractions.md | 2 +-
 .../launching-workloads-with-gpu-memory-swap.md          | 8 ++++----
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
index 36ad28477d..434ccb5b71 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
@@ -116,7 +116,7 @@ Before you start, make sure:
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
             "image": "jupyter/scipy-notebook",
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuMemoryRequest": "4G",
                 "gpuMemoryLimit": "12G",
                 "largeShmRequest": true
@@ -185,7 +185,7 @@ Before you start, make sure:
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
             "image": "jupyter/scipy-notebook",
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuMemoryRequest": "4G",
                 "gpuMemoryLimit": "12G",
                 "largeShmRequest": true
diff --git a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
index 63e89f72c2..f19bc119a2 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
@@ -141,7 +141,7 @@ Before you start, make sure:
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
             "image": "jupyter/scipy-notebook",
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuRequestType": "portion",
                 "gpuPortionRequest": 0.1

diff --git a/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md b/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
index dd68a3f74e..76effb05b5 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-gpu-memory-swap.md
@@ -103,9 +103,9 @@ Before you start, make sure:
                 }
             ],
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
+                "gpuPortionRequest": 0.1,
                 "gpuPortionLimit": 1,
                 "cpuCoreRequest":0.2,
                 "cpuMemoryRequest": "200M",
@@ -172,9 +172,9 @@ Before you start, make sure:
                 }
             ],
             "compute": {
-                "gpuDevicesRequest": 1
+                "gpuDevicesRequest": 1,
                 "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
+                "gpuPortionRequest": 0.1,
                 "gpuPortionLimit": 1,
                 "cpuCoreRequest":0.2,
                 "cpuMemoryRequest": "200M",

From 2dc8ad6d1d27022d64c0ef2bf12a6ab34af34058 Mon Sep 17 00:00:00 2001
From: Sherin
Date: Thu, 27 Feb 2025 14:58:18 +0200
Subject: [PATCH 2/2] Updated quick starts

---
 ...ng-workloads-with-dynamic-gpu-fractions.md | 58 +++++++++----------
 .../launching-workloads-with-gpu-fractions.md | 26 ++++-----
 .../workspaces/quickstart-jupyter.md          |  2 +-
 3 files changed, 42 insertions(+), 44 deletions(-)

diff --git a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
index 434ccb5b71..b081644216 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-dynamic-gpu-fractions.md
@@ -103,7 +103,7 @@ Before you start, make sure:
 === "API"
     Copy the following command to your terminal. Make sure to update the below parameters according to the comments. For more details, see [Workspaces API](https://api-docs.run.ai/latest/tag/Workspaces):

-    ```sh
+    ```bash
    curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \ # <COMPANY-URL> is the link to the Run:ai user interface.
    -H 'Content-Type: application/json' \
    -H 'Authorization: Bearer <TOKEN>' \ # <TOKEN> is the API access token obtained in Step 1.
@@ -114,21 +114,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1,
-                "gpuMemoryRequest": "4G",
-                "gpuMemoryLimit": "12G",
-                "largeShmRequest": true
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuMemoryRequest": "4G",
+                "gpuMemoryLimit": "12G",
+                "largeShmRequest": true
+            }
         }
     }'
     ```
@@ -172,7 +171,7 @@ Before you start, make sure:
 === "API"
     Copy the following command to your terminal. Make sure to update the below parameters according to the comments. For more details, see [Workspaces API](https://api-docs.run.ai/latest/tag/Workspaces):

-    ```sh
+    ```bash
    curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \ # <COMPANY-URL> is the link to the Run:ai user interface.
    -H 'Content-Type: application/json' \
    -H 'Authorization: Bearer <TOKEN>' \ # <TOKEN> is the API access token obtained in Step 1.
@@ -183,21 +182,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1,
-                "gpuMemoryRequest": "4G",
-                "gpuMemoryLimit": "12G",
-                "largeShmRequest": true
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuMemoryRequest": "4G",
+                "gpuMemoryLimit": "12G",
+                "largeShmRequest": true
+            }
         }
     }'
     ```
diff --git a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
index f19bc119a2..c583c734cb 100644
--- a/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
+++ b/docs/Researcher/scheduling/launching-workloads-with-gpu-fractions.md
@@ -139,20 +139,20 @@ Before you start, make sure:
         "spec": {
             "command" : "start-notebook.sh",
             "args" : "--NotebookApp.base_url=/${RUNAI_PROJECT}/${RUNAI_JOB_NAME} --NotebookApp.token=''",
-            "image": "jupyter/scipy-notebook",
+            "image": "jupyter/base-notebook",
+            "exposedUrls": [
+                {
+                    "container": 8888,
+                    "toolType": "jupyter-notebook",
+                    "toolName": "Jupyter"
+                }
+            ],
             "compute": {
-                "gpuDevicesRequest": 1,
-                "gpuRequestType": "portion",
-                "gpuPortionRequest": 0.1
-
-            },
-            "exposedUrls" : [
-                {
-                    "container" : 8888,
-                    "toolType": "jupyter-notebook", \ #toolType will show the Jupyter icon when connecting to the Jupyter tool via the user interface.
-                    "toolName": "Jupyter" \ #toolName text will show when connecting to the Jupyter tool via the user interface.
-                }
-            ]
+                "gpuDevicesRequest": 1,
+                "gpuRequestType": "portion",
+                "gpuPortionRequest": 0.1
+
+            }
         }
     }'
     ```
diff --git a/docs/Researcher/workloads/workspaces/quickstart-jupyter.md b/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
index 953dc25ae6..0e39d405da 100644
--- a/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
+++ b/docs/Researcher/workloads/workspaces/quickstart-jupyter.md
@@ -40,7 +40,7 @@ Before you start, make sure:

 ## Step 2: Submitting a workspace

-=== "UI
+=== "UI"
     1. Go to the Workload manager → Workloads
     2. Select __+NEW WORKLOAD__ and then __Workspace__
     3. Select under which __cluster__ to create the workload
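For reference, the corrected dynamic GPU fractions request from the hunks above assembles into a call like the sketch below. The `<COMPANY-URL>`, `<TOKEN>`, `<PROJECT-ID>`, and `<CLUSTER-UUID>` values are placeholders, and the `name`, `projectId`, and `clusterId` fields are assumptions based on the Workspaces API rather than lines shown in this patch; `command` and `args` are omitted for brevity.

```bash
# Minimal sketch of the request after this patch is applied.
# Assumptions: <COMPANY-URL>, <TOKEN>, <PROJECT-ID>, and <CLUSTER-UUID> are
# placeholders to fill in; name/projectId/clusterId do not appear in the hunks.
curl -L 'https://<COMPANY-URL>/api/v1/workloads/workspaces' \
  -H 'Content-Type: application/json' \
  -H 'Authorization: Bearer <TOKEN>' \
  -d '{
    "name": "dynamic-gpu-fractions",
    "projectId": "<PROJECT-ID>",
    "clusterId": "<CLUSTER-UUID>",
    "spec": {
      "image": "gcr.io/run-ai-lab/pytorch-example-jupyter",
      "exposedUrls": [
        {
          "container": 8888,
          "toolType": "jupyter-notebook",
          "toolName": "Jupyter"
        }
      ],
      "compute": {
        "gpuDevicesRequest": 1,
        "gpuMemoryRequest": "4G",
        "gpuMemoryLimit": "12G",
        "largeShmRequest": true
      }
    }
  }'
```

Piping the `-d` body through `jq .` or `python -m json.tool` before submitting is one way to catch syntax problems such as the missing commas that PATCH 1 fixes.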