Skip to content

Commit a054448

Browse files
authored
Merge pull request Azure#426 from rastala/master
version 1.0.43
2 parents 67e10e0 + 79c9f50 commit a054448

File tree

33 files changed

+7263
-3153
lines changed

33 files changed

+7263
-3153
lines changed

configuration.ipynb

Lines changed: 288 additions & 290 deletions
Large diffs are not rendered by default.

how-to-use-azureml/automated-machine-learning/dataprep-remote-execution/auto-ml-dataprep-remote-execution.ipynb

Lines changed: 56 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,7 @@
192192
"cell_type": "markdown",
193193
"metadata": {},
194194
"source": [
195-
"### Create or Attach a Remote Linux DSVM"
195+
"### Create or Attach an AmlCompute cluster"
196196
]
197197
},
198198
{
@@ -201,21 +201,36 @@
201201
"metadata": {},
202202
"outputs": [],
203203
"source": [
204-
"dsvm_name = 'mydsvmb'\n",
204+
"from azureml.core.compute import AmlCompute\n",
205+
"from azureml.core.compute import ComputeTarget\n",
205206
"\n",
206-
"try:\n",
207-
" while ws.compute_targets[dsvm_name].provisioning_state == 'Creating':\n",
208-
" time.sleep(1)\n",
209-
" \n",
210-
" dsvm_compute = DsvmCompute(ws, dsvm_name)\n",
211-
" print('Found existing DVSM.')\n",
212-
"except:\n",
213-
" print('Creating a new DSVM.')\n",
214-
" dsvm_config = DsvmCompute.provisioning_configuration(vm_size = \"Standard_D2_v2\")\n",
215-
" dsvm_compute = DsvmCompute.create(ws, name = dsvm_name, provisioning_configuration = dsvm_config)\n",
216-
" dsvm_compute.wait_for_completion(show_output = True)\n",
217-
" print(\"Waiting one minute for ssh to be accessible\")\n",
218-
" time.sleep(90) # Wait for ssh to be accessible"
207+
"# Choose a name for your cluster.\n",
208+
"amlcompute_cluster_name = \"cpucluster\"\n",
209+
"\n",
210+
"found = False\n",
211+
"\n",
212+
"# Check if this compute target already exists in the workspace.\n",
213+
"\n",
214+
"cts = ws.compute_targets\n",
215+
"if amlcompute_cluster_name in cts and cts[amlcompute_cluster_name].type == 'AmlCompute':\n",
216+
" found = True\n",
217+
" print('Found existing compute target.')\n",
218+
" compute_target = cts[amlcompute_cluster_name]\n",
219+
"\n",
220+
"if not found:\n",
221+
" print('Creating a new compute target...')\n",
222+
" provisioning_config = AmlCompute.provisioning_configuration(vm_size = \"STANDARD_D2_V2\", # for GPU, use \"STANDARD_NC6\"\n",
223+
" #vm_priority = 'lowpriority', # optional\n",
224+
" max_nodes = 6)\n",
225+
"\n",
226+
"    # Create the cluster.\n",
227+
" compute_target = ComputeTarget.create(ws, amlcompute_cluster_name, provisioning_config)\n",
228+
"\n",
229+
" # Can poll for a minimum number of nodes and for a specific timeout.\n",
230+
" # If no min_node_count is provided, it will use the scale settings for the cluster.\n",
231+
" compute_target.wait_for_completion(show_output = True, min_node_count = None, timeout_in_minutes = 20)\n",
232+
"\n",
233+
" # For a more detailed view of current AmlCompute status, use get_status()."
219234
]
220235
},
221236
{
@@ -227,9 +242,13 @@
227242
"from azureml.core.runconfig import RunConfiguration\n",
228243
"from azureml.core.conda_dependencies import CondaDependencies\n",
229244
"\n",
245+
"# create a new RunConfig object\n",
230246
"conda_run_config = RunConfiguration(framework=\"python\")\n",
231247
"\n",
232-
"conda_run_config.target = dsvm_compute\n",
248+
"# Set compute target to AmlCompute\n",
249+
"conda_run_config.target = compute_target\n",
250+
"conda_run_config.environment.docker.enabled = True\n",
251+
"conda_run_config.environment.docker.base_image = azureml.core.runconfig.DEFAULT_CPU_IMAGE\n",
233252
"\n",
234253
"cd = CondaDependencies.create(pip_packages=['azureml-sdk[automl]'], conda_packages=['numpy','py-xgboost<=0.80'])\n",
235254
"conda_run_config.environment.python.conda_dependencies = cd"
@@ -294,6 +313,27 @@
294313
"remote_run.clean_preprocessor_cache()"
295314
]
296315
},
316+
{
317+
"cell_type": "markdown",
318+
"metadata": {},
319+
"source": [
320+
"### Cancelling Runs\n",
321+
"You can cancel ongoing remote runs using the `cancel` and `cancel_iteration` functions."
322+
]
323+
},
324+
{
325+
"cell_type": "code",
326+
"execution_count": null,
327+
"metadata": {},
328+
"outputs": [],
329+
"source": [
330+
"# Cancel the ongoing experiment and stop scheduling new iterations.\n",
331+
"# remote_run.cancel()\n",
332+
"\n",
333+
"# Cancel iteration 1 and move onto iteration 2.\n",
334+
"# remote_run.cancel_iteration(1)"
335+
]
336+
},
297337
{
298338
"cell_type": "markdown",
299339
"metadata": {},

0 commit comments

Comments
 (0)