From d467d5b2f805686cb8128dc371904ba6bf186e09 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Fri, 11 Jul 2025 07:10:50 +0000 Subject: [PATCH 1/2] Initialize repository for autorest build 20250711.1 --- eng/emitter-package-lock.json | 362 +++++++++++++++++++++++----------- eng/emitter-package.json | 28 +-- 2 files changed, 257 insertions(+), 133 deletions(-) diff --git a/eng/emitter-package-lock.json b/eng/emitter-package-lock.json index 0302c4091f4b..1eddae33dbea 100644 --- a/eng/emitter-package-lock.json +++ b/eng/emitter-package-lock.json @@ -1,20 +1,18 @@ { - "name": "dist/src/index.js", + "name": "emitter-consumer", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "dist/src/index.js", "dependencies": { - "@azure-tools/typespec-python": "0.46.0" + "@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.46.0-alpha.20250711.1.tgz" }, "devDependencies": { - "@azure-tools/typespec-autorest": "~0.57.1", + "@azure-tools/typespec-autorest": "~0.57.0", "@azure-tools/typespec-azure-core": "~0.57.0", - "@azure-tools/typespec-azure-resource-manager": "~0.57.2", - "@azure-tools/typespec-azure-rulesets": "~0.57.1", - "@azure-tools/typespec-client-generator-core": "~0.57.3", - "@azure-tools/typespec-liftr-base": "0.8.0", + "@azure-tools/typespec-azure-resource-manager": "~0.57.0", + "@azure-tools/typespec-azure-rulesets": "~0.57.0", + "@azure-tools/typespec-client-generator-core": "~0.57.2", "@typespec/compiler": "^1.1.0", "@typespec/events": "~0.71.0", "@typespec/http": "^1.1.0", @@ -30,6 +28,7 @@ "version": "0.57.1", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.57.1.tgz", "integrity": "sha512-AZ/SlkkxvRT/CJs6wOUbORwKYztU3D8+lR3hcj34vQlR/U3qSTCiCdL6xA4WH9LcYmP9aGjpopprGOEHQiU5SQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -49,6 +48,7 @@ "version": "0.57.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.57.0.tgz", "integrity": "sha512-O+F3axrJOJHjYGrQLRWoydHtWjWiXeAlaaILncS0I0xe6kinyFkpn7VIVKxH9ZZ+hPmkDAZybO53656R3PRfUA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -63,6 +63,7 @@ "version": "0.57.2", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.57.2.tgz", "integrity": "sha512-ljWdjsXpisst4AjnZsU/YMBcqcCGAVnloUaVf39aylFrvakdEQ/Esi/1Jrap05a9C7aXStzzZt3WZ8bPyQXmDw==", + "dev": true, "license": "MIT", "dependencies": { "change-case": "~5.4.4", @@ -84,6 +85,7 @@ "version": "0.57.1", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.57.1.tgz", "integrity": "sha512-+W+vPGiV4qpqwIeBb4k6sIvDidHxV4dlw4xW9rqoxR/dOTeIsHP6hOATpf8AMsWHcmOwvTn4ThDPhFgBCswvnw==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -99,6 +101,7 @@ "version": "0.57.3", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.57.3.tgz", "integrity": "sha512-c/OPeSpKH29jD2Abuli8z7ww5uorplOt9w3KbaQaMSx12u6gWi5vtYhQaFKk9AGiFGDyLPeA0+qo+UQ6t3pRBg==", + "dev": true, "license": "MIT", "dependencies": { "change-case": "~5.4.4", @@ -121,16 +124,10 @@ "@typespec/xml": "^0.71.0" } }, - "node_modules/@azure-tools/typespec-liftr-base": { - "version": "0.8.0", - "resolved": 
"https://registry.npmjs.org/@azure-tools/typespec-liftr-base/-/typespec-liftr-base-0.8.0.tgz", - "integrity": "sha512-xftTTtVjDuxIzugQ9nL/abmttdDM3HAf5HhqKzs9DO0Kl0ZhXQlB2DYlT1hBs/N+IWerMF9k2eKs2RncngA03g==", - "dev": true - }, "node_modules/@azure-tools/typespec-python": { - "version": "0.46.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-python/-/typespec-python-0.46.0.tgz", - "integrity": "sha512-pxPuVFrUoDQPZ9MvAdTwDNJXqCcx04aofe0X60f4xmDyCJOGmIFvWXUSmu64Wd+mU2CeuHXCSSG3E4VO8bUnUw==", + "version": "0.46.0-alpha.20250711.1", + "resolved": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.46.0-alpha.20250711.1.tgz", + "integrity": "sha512-syl8QMaQG30qj33dxu1eWTDj5yTEJTTnwCN5EW5VOIbk627oFGuJwPsalq6fU3Nf+l2+M7mITz5TIueCIiaF+w==", "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -164,6 +161,7 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, "license": "MIT", "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", @@ -178,15 +176,16 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", - "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.6.tgz", + "integrity": "sha512-ShbM/3XxwuxjFiuVBHA+d3j5dyac0aEVVq1oluIDf71hUw0aRF59dV/efUsIwFnR6m8JNM2FjZOzmaZ8yG61kw==", "cpu": [ "ppc64" ], @@ -200,9 +199,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz", - "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.6.tgz", + "integrity": "sha512-S8ToEOVfg++AU/bHwdksHNnyLyVM+eMVAOf6yRKFitnwnbwwPNqKr3srzFRe7nzV69RQKb5DgchIX5pt3L53xg==", "cpu": [ "arm" ], @@ -216,9 +215,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", - "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.6.tgz", + "integrity": "sha512-hd5zdUarsK6strW+3Wxi5qWws+rJhCCbMiC9QZyzoxfk5uHRIE8T287giQxzVpEvCwuJ9Qjg6bEjcRJcgfLqoA==", "cpu": [ "arm64" ], @@ -232,9 +231,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz", - "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", + "version": "0.25.6", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.6.tgz", + "integrity": "sha512-0Z7KpHSr3VBIO9A/1wcT3NTy7EB4oNC4upJ5ye3R7taCc2GUdeynSLArnon5G8scPwaU866d3H4BCrE5xLW25A==", "cpu": [ "x64" ], @@ -248,9 +247,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", - "integrity": "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.6.tgz", + "integrity": "sha512-FFCssz3XBavjxcFxKsGy2DYK5VSvJqa6y5HXljKzhRZ87LvEi13brPrf/wdyl/BbpbMKJNOr1Sd0jtW4Ge1pAA==", "cpu": [ "arm64" ], @@ -264,9 +263,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", - "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.6.tgz", + "integrity": "sha512-GfXs5kry/TkGM2vKqK2oyiLFygJRqKVhawu3+DOCk7OxLy/6jYkWXhlHwOoTb0WqGnWGAS7sooxbZowy+pK9Yg==", "cpu": [ "x64" ], @@ -280,9 +279,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", - "integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.6.tgz", + "integrity": "sha512-aoLF2c3OvDn2XDTRvn8hN6DRzVVpDlj2B/F66clWd/FHLiHaG3aVZjxQX2DYphA5y/evbdGvC6Us13tvyt4pWg==", "cpu": [ "arm64" ], @@ -296,9 +295,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", - "integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.6.tgz", + "integrity": "sha512-2SkqTjTSo2dYi/jzFbU9Plt1vk0+nNg8YC8rOXXea+iA3hfNJWebKYPs3xnOUf9+ZWhKAaxnQNUf2X9LOpeiMQ==", "cpu": [ "x64" ], @@ -312,9 +311,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", - "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.6.tgz", + "integrity": "sha512-SZHQlzvqv4Du5PrKE2faN0qlbsaW/3QQfUUc6yO2EjFcA83xnwm91UbEEVx4ApZ9Z5oG8Bxz4qPE+HFwtVcfyw==", "cpu": [ "arm" ], @@ -328,9 +327,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", - "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.6.tgz", + "integrity": "sha512-b967hU0gqKd9Drsh/UuAm21Khpoh6mPBSgz8mKRq4P5mVK8bpA+hQzmm/ZwGVULSNBzKdZPQBRT3+WuVavcWsQ==", "cpu": [ "arm64" ], @@ -344,9 +343,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", - "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.6.tgz", + "integrity": "sha512-aHWdQ2AAltRkLPOsKdi3xv0mZ8fUGPdlKEjIEhxCPm5yKEThcUjHpWB1idN74lfXGnZ5SULQSgtr5Qos5B0bPw==", "cpu": [ "ia32" ], @@ -360,9 +359,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", - "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.6.tgz", + "integrity": "sha512-VgKCsHdXRSQ7E1+QXGdRPlQ/e08bN6WMQb27/TMfV+vPjjTImuT9PmLXupRlC90S1JeNNW5lzkAEO/McKeJ2yg==", "cpu": [ "loong64" ], @@ -376,9 +375,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", - "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.6.tgz", + "integrity": "sha512-WViNlpivRKT9/py3kCmkHnn44GkGXVdXfdc4drNmRl15zVQ2+D2uFwdlGh6IuK5AAnGTo2qPB1Djppj+t78rzw==", "cpu": [ "mips64el" ], @@ -392,9 +391,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", - "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.6.tgz", + "integrity": "sha512-wyYKZ9NTdmAMb5730I38lBqVu6cKl4ZfYXIs31Baf8aoOtB4xSGi3THmDYt4BTFHk7/EcVixkOV2uZfwU3Q2Jw==", "cpu": [ "ppc64" ], @@ -408,9 +407,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", - "integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.6.tgz", + "integrity": "sha512-KZh7bAGGcrinEj4qzilJ4hqTY3Dg2U82c8bv+e1xqNqZCrCyc+TL9AUEn5WGKDzm3CfC5RODE/qc96OcbIe33w==", "cpu": [ "riscv64" ], @@ -424,9 +423,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", - "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.6.tgz", + "integrity": "sha512-9N1LsTwAuE9oj6lHMyyAM+ucxGiVnEqUdp4v7IaMmrwb06ZTEVCIs3oPPplVsnjPfyjmxwHxHMF8b6vzUVAUGw==", "cpu": [ "s390x" ], @@ -440,9 +439,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", - "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", + "version": "0.25.6", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.6.tgz", + "integrity": "sha512-A6bJB41b4lKFWRKNrWoP2LHsjVzNiaurf7wyj/XtFNTsnPuxwEBWHLty+ZE0dWBKuSK1fvKgrKaNjBS7qbFKig==", "cpu": [ "x64" ], @@ -456,9 +455,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", - "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.6.tgz", + "integrity": "sha512-IjA+DcwoVpjEvyxZddDqBY+uJ2Snc6duLpjmkXm/v4xuS3H+3FkLZlDm9ZsAbF9rsfP3zeA0/ArNDORZgrxR/Q==", "cpu": [ "arm64" ], @@ -472,9 +471,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", - "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.6.tgz", + "integrity": "sha512-dUXuZr5WenIDlMHdMkvDc1FAu4xdWixTCRgP7RQLBOkkGgwuuzaGSYcOpW4jFxzpzL1ejb8yF620UxAqnBrR9g==", "cpu": [ "x64" ], @@ -488,9 +487,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", - "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.6.tgz", + "integrity": "sha512-l8ZCvXP0tbTJ3iaqdNf3pjaOSd5ex/e6/omLIQCVBLmHTlfXW3zAxQ4fnDmPLOB1x9xrcSi/xtCWFwCZRIaEwg==", "cpu": [ "arm64" ], @@ -504,9 +503,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", - "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.6.tgz", + "integrity": "sha512-hKrmDa0aOFOr71KQ/19JC7az1P0GWtCN1t2ahYAf4O007DHZt/dW8ym5+CUdJhQ/qkZmI1HAF8KkJbEFtCL7gw==", "cpu": [ "x64" ], @@ -519,10 +518,26 @@ "node": ">=18" } }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.6.tgz", + "integrity": "sha512-+SqBcAWoB1fYKmpWoQP4pGtx+pUUC//RNYhFdbcSA16617cchuryuhOCRpPsjCblKukAckWsV+aQ3UKT/RMPcA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", - "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.6.tgz", + "integrity": "sha512-dyCGxv1/Br7MiSC42qinGL8KkG4kX0pEsdb0+TKhmJZgCUDBGmyo1/ArCjNGiOLiIAgdbWgmWgib4HoCi5t7kA==", "cpu": [ "x64" ], @@ -536,9 +551,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", - "integrity": 
"sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.6.tgz", + "integrity": "sha512-42QOgcZeZOvXfsCBJF5Afw73t4veOId//XD3i+/9gSkhSV6Gk3VPlWncctI+JcOyERv85FUo7RxuxGy+z8A43Q==", "cpu": [ "arm64" ], @@ -552,9 +567,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", - "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.6.tgz", + "integrity": "sha512-4AWhgXmDuYN7rJI6ORB+uU9DHLq/erBbuMoAuB4VWJTu5KtCgcKYPynF0YI1VkBNuEfjNlLrFr9KZPJzrtLkrQ==", "cpu": [ "ia32" ], @@ -568,9 +583,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", - "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.6.tgz", + "integrity": "sha512-NgJPHHbEpLQgDH2MjQu90pzW/5vvXIZ7KOnPyNBm92A6WgZ/7b6fJyUBjoumLqeOQQGqY2QjQxRo97ah4Sj0cA==", "cpu": [ "x64" ], @@ -587,6 +602,7 @@ "version": "4.1.9", "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.9.tgz", "integrity": "sha512-DBJBkzI5Wx4jFaYm221LHvAhpKYkhVS0k9plqHwaHhofGNxvYB7J3Bz8w+bFJ05zaMb0sZNHo4KdmENQFlNTuQ==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -611,6 +627,7 @@ "version": "5.1.13", "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.13.tgz", "integrity": "sha512-EkCtvp67ICIVVzjsquUiVSd+V5HRGOGQfsqA4E4vMWhYnB7InUL0pa0TIWt1i+OfP16Gkds8CdIu6yGZwOM1Yw==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -632,6 +649,7 @@ "version": "10.1.14", "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.14.tgz", "integrity": "sha512-Ma+ZpOJPewtIYl6HZHZckeX1STvDnHTCB2GVINNUlSEn2Am6LddWwfPkIGY0IUFVjUUrr/93XlBwTK6mfLjf0A==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/figures": "^1.0.12", @@ -659,6 +677,7 @@ "version": "4.2.14", "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.14.tgz", "integrity": "sha512-yd2qtLl4QIIax9DTMZ1ZN2pFrrj+yL3kgIWxm34SS6uwCr0sIhsNyudUjAo5q3TqI03xx4SEBkUJqZuAInp9uA==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -681,6 +700,7 @@ "version": "4.0.16", "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.16.tgz", "integrity": "sha512-oiDqafWzMtofeJyyGkb1CTPaxUkjIcSxePHHQCfif8t3HV9pHcw1Kgdw3/uGpDvaFfeTluwQtWiqzPVjAqS3zA==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -703,6 +723,7 @@ "version": "1.0.12", "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.12.tgz", "integrity": "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -712,6 +733,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.2.0.tgz", "integrity": "sha512-opqpHPB1NjAmDISi3uvZOTrjEEU5CWVu/HBkDby8t93+6UxYX0Z7Ps0Ltjm5sZiEbWenjubwUkivAEYQmy9xHw==", + "dev": true, "license": "MIT", "dependencies": { 
"@inquirer/core": "^10.1.14", @@ -733,6 +755,7 @@ "version": "3.0.16", "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.16.tgz", "integrity": "sha512-kMrXAaKGavBEoBYUCgualbwA9jWUx2TjMA46ek+pEKy38+LFpL9QHlTd8PO2kWPUgI/KB+qi02o4y2rwXbzr3Q==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -754,6 +777,7 @@ "version": "4.0.16", "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.16.tgz", "integrity": "sha512-g8BVNBj5Zeb5/Y3cSN+hDUL7CsIFDIuVxb9EPty3lkxBaYpjL5BNRKSYOF9yOLe+JOcKFd+TSVeADQ4iSY7rbg==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -776,6 +800,7 @@ "version": "7.6.0", "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.6.0.tgz", "integrity": "sha512-jAhL7tyMxB3Gfwn4HIJ0yuJ5pvcB5maYUcouGcgd/ub79f9MqZ+aVnBtuFf+VC2GTkCBF+R+eo7Vi63w5VZlzw==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/checkbox": "^4.1.9", @@ -805,6 +830,7 @@ "version": "4.1.4", "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.4.tgz", "integrity": "sha512-5GGvxVpXXMmfZNtvWw4IsHpR7RzqAR624xtkPd1NxxlV5M+pShMqzL4oRddRkg8rVEOK9fKdJp1jjVML2Lr7TQ==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -827,6 +853,7 @@ "version": "3.0.16", "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.16.tgz", "integrity": "sha512-POCmXo+j97kTGU6aeRjsPyuCpQQfKcMXdeTMw708ZMtWrj5aykZvlUxH4Qgz3+Y1L/cAVZsSpA+UgZCu2GMOMg==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -850,6 +877,7 @@ "version": "4.2.4", "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.2.4.tgz", "integrity": "sha512-unTppUcTjmnbl/q+h8XeQDhAqIOmwWYWNyiiP2e3orXrg6tOaa5DHXja9PChCSbChOsktyKgOieRZFnajzxoBg==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.14", @@ -874,6 +902,7 @@ "version": "3.0.7", "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.7.tgz", "integrity": "sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -891,6 +920,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, "license": "ISC", "dependencies": { "minipass": "^7.0.4" @@ -903,6 +933,7 @@ "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", @@ -916,6 +947,7 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, "license": "MIT", "engines": { "node": ">= 8" @@ -925,6 +957,7 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.scandir": "2.1.5", @@ -938,6 +971,7 @@ "version": "2.3.0", "resolved": 
"https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -950,6 +984,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-1.1.0.tgz", "integrity": "sha512-dtwosIqd2UUEEIVBR+oDiUtN4n1lP8/9GxQVno+wbkijQgKDj4Hg0Vaq6HG4BduF7RptDdtzkdGQCS9CgOIdRA==", + "dev": true, "license": "MIT", "dependencies": { "@babel/code-frame": "~7.27.1", @@ -982,6 +1017,7 @@ "version": "7.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -994,6 +1030,7 @@ "version": "2.7.1", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.1.tgz", "integrity": "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==", + "dev": true, "license": "ISC", "bin": { "yaml": "bin.mjs" @@ -1006,6 +1043,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/events/-/events-0.71.0.tgz", "integrity": "sha512-dJeyqBGqTTSlFDVWpdqeMjDpEyRmenH3yDABK3T/30MrO94sdXigxmeBnPCcOaaqst6pV3anFuKwfAqEN3GnbA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1018,6 +1056,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@typespec/http/-/http-1.1.0.tgz", "integrity": "sha512-1doVGmkv3N8l57fVuci4jGMZ61EZBlDzuNZO2b9o0+mexCOs/P96CIpFkaNVvTQgjpyFsW1DlXiUKAvUC9zQfg==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1069,6 +1108,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-1.1.0.tgz", "integrity": "sha512-HPvrpSS7eSVk3fEkWndcDTrAZssWRYv3FyDTqVqljildc7FAiXdo88+r5CCK8endmgIrES7uJdHLkcIGUZx1pg==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1082,6 +1122,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.71.0.tgz", "integrity": "sha512-5qX+nWO5Jx4P1iTTT2REgdCtHsTMjlv/gL90u8cO1ih3yHDtf18a41UL6jSYaVUIvIj6rlmrgopActf0FhhUcw==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1095,6 +1136,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/sse/-/sse-0.71.0.tgz", "integrity": "sha512-4lAwDMj8h/50s6zp/8IX8CLW+H3P+od5O32Bb8+fyTabEo7+H3PbdBbIJGv9Sj7+l8+bZXsyfRXa+aJxR9o2ZA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1110,6 +1152,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/streams/-/streams-0.71.0.tgz", "integrity": "sha512-ofyAcg8GnO6uTffGo00D6MMfRkqie4QtnUUSGNC1Bam2WG+wkeSG/huP0WNRT8GofzK1N0M6QqQwAW/vdq9ymQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1122,6 +1165,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.71.0.tgz", "integrity": "sha512-8qknFLOpZTVzQ+SveXg9G7WJV8P80yxLlj0nOc3ZLBKiPgM6FF7vGWHRNtnh7s3gSXvWyxopaJ9fZSLZSJmbww==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1134,6 +1178,7 @@ "version": "0.71.0", "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.71.0.tgz", "integrity": "sha512-IcBM4fd5li+hfaUoxeiFrUJx+gCGwIJ+LojdbAZPP3Kbdv12RS+8+CHH6d9qGV3qExgWGCny6WDUrUIaVCLonw==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1146,6 +1191,7 @@ "version": 
"8.17.1", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -1162,6 +1208,7 @@ "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, "license": "MIT", "dependencies": { "type-fest": "^0.21.3" @@ -1177,6 +1224,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -1186,6 +1234,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "license": "MIT", "dependencies": { "color-convert": "^2.0.1" @@ -1207,6 +1256,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -1219,18 +1269,21 @@ "version": "5.4.4", "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", "integrity": "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==", + "dev": true, "license": "MIT" }, "node_modules/chardet": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true, "license": "MIT" }, "node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -1240,6 +1293,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", + "dev": true, "license": "ISC", "engines": { "node": ">= 12" @@ -1249,6 +1303,7 @@ "version": "8.0.1", "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, "license": "ISC", "dependencies": { "string-width": "^4.2.0", @@ -1263,6 +1318,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", @@ -1280,6 +1336,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "license": "MIT", "dependencies": { "color-name": "~1.1.4" @@ -1292,18 +1349,21 @@ "version": "1.1.4", "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, "license": "MIT" }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, "license": "MIT" }, "node_modules/env-paths": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "dev": true, "license": "MIT", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" @@ -1313,9 +1373,9 @@ } }, "node_modules/esbuild": { - "version": "0.25.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", - "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", + "version": "0.25.6", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.6.tgz", + "integrity": "sha512-GVuzuUwtdsghE3ocJ9Bs8PNoF13HNQ5TXbEi2AhvVb8xU1Iwt9Fos9FEamfoee+u/TOsn7GUWc04lz46n2bbTg==", "hasInstallScript": true, "license": "MIT", "bin": { @@ -1325,37 +1385,39 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.5", - "@esbuild/android-arm": "0.25.5", - "@esbuild/android-arm64": "0.25.5", - "@esbuild/android-x64": "0.25.5", - "@esbuild/darwin-arm64": "0.25.5", - "@esbuild/darwin-x64": "0.25.5", - "@esbuild/freebsd-arm64": "0.25.5", - "@esbuild/freebsd-x64": "0.25.5", - "@esbuild/linux-arm": "0.25.5", - "@esbuild/linux-arm64": "0.25.5", - "@esbuild/linux-ia32": "0.25.5", - "@esbuild/linux-loong64": "0.25.5", - "@esbuild/linux-mips64el": "0.25.5", - "@esbuild/linux-ppc64": "0.25.5", - "@esbuild/linux-riscv64": "0.25.5", - "@esbuild/linux-s390x": "0.25.5", - "@esbuild/linux-x64": "0.25.5", - "@esbuild/netbsd-arm64": "0.25.5", - "@esbuild/netbsd-x64": "0.25.5", - "@esbuild/openbsd-arm64": "0.25.5", - "@esbuild/openbsd-x64": "0.25.5", - "@esbuild/sunos-x64": "0.25.5", - "@esbuild/win32-arm64": "0.25.5", - "@esbuild/win32-ia32": "0.25.5", - "@esbuild/win32-x64": "0.25.5" + "@esbuild/aix-ppc64": "0.25.6", + "@esbuild/android-arm": "0.25.6", + "@esbuild/android-arm64": "0.25.6", + "@esbuild/android-x64": "0.25.6", + "@esbuild/darwin-arm64": "0.25.6", + "@esbuild/darwin-x64": "0.25.6", + "@esbuild/freebsd-arm64": "0.25.6", + "@esbuild/freebsd-x64": "0.25.6", + "@esbuild/linux-arm": "0.25.6", + "@esbuild/linux-arm64": "0.25.6", + "@esbuild/linux-ia32": "0.25.6", + "@esbuild/linux-loong64": "0.25.6", + "@esbuild/linux-mips64el": "0.25.6", + "@esbuild/linux-ppc64": "0.25.6", + "@esbuild/linux-riscv64": "0.25.6", + "@esbuild/linux-s390x": "0.25.6", + "@esbuild/linux-x64": "0.25.6", + "@esbuild/netbsd-arm64": "0.25.6", + "@esbuild/netbsd-x64": "0.25.6", + "@esbuild/openbsd-arm64": "0.25.6", + "@esbuild/openbsd-x64": "0.25.6", + "@esbuild/openharmony-arm64": "0.25.6", + "@esbuild/sunos-x64": "0.25.6", + "@esbuild/win32-arm64": "0.25.6", + "@esbuild/win32-ia32": "0.25.6", + "@esbuild/win32-x64": "0.25.6" } }, "node_modules/escalade": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -1365,6 
+1427,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, "license": "MIT", "dependencies": { "chardet": "^0.7.0", @@ -1379,12 +1442,14 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, "license": "MIT" }, "node_modules/fast-glob": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", @@ -1401,6 +1466,7 @@ "version": "3.0.6", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "dev": true, "funding": [ { "type": "github", @@ -1417,6 +1483,7 @@ "version": "1.19.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, "license": "ISC", "dependencies": { "reusify": "^1.0.4" @@ -1426,6 +1493,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -1466,6 +1534,7 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" @@ -1487,6 +1556,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, "license": "ISC", "dependencies": { "is-glob": "^4.0.1" @@ -1499,6 +1569,7 @@ "version": "14.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", @@ -1525,6 +1596,7 @@ "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" @@ -1537,6 +1609,7 @@ "version": "7.0.5", "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, "license": "MIT", "engines": { "node": ">= 4" @@ -1546,6 +1619,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, 
"license": "MIT", "engines": { "node": ">=0.10.0" @@ -1555,6 +1629,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -1564,6 +1639,7 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" @@ -1576,6 +1652,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.12.0" @@ -1585,6 +1662,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -1597,6 +1675,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, "license": "MIT" }, "node_modules/js-yaml": { @@ -1615,6 +1694,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, "license": "MIT" }, "node_modules/jsonfile": { @@ -1645,6 +1725,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, "license": "MIT", "engines": { "node": ">= 8" @@ -1654,6 +1735,7 @@ "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, "license": "MIT", "dependencies": { "braces": "^3.0.3", @@ -1667,6 +1749,7 @@ "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, "license": "ISC", "engines": { "node": ">=16 || 14 >=14.17" @@ -1676,6 +1759,7 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "dev": true, "license": "MIT", "dependencies": { "minipass": "^7.1.2" @@ -1688,6 +1772,7 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, "license": "MIT", "bin": { "mkdirp": "dist/cjs/src/bin.js" @@ -1703,6 +1788,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", "integrity": 
"sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "dev": true, "license": "MIT", "bin": { "mustache": "bin/mustache" @@ -1712,6 +1798,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz", "integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==", + "dev": true, "license": "ISC", "engines": { "node": "^18.17.0 || >=20.5.0" @@ -1721,6 +1808,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1730,6 +1818,7 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -1742,12 +1831,14 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, "license": "MIT", "engines": { "node": ">=8.6" @@ -1760,6 +1851,7 @@ "version": "8.0.0", "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, "license": "MIT", "engines": { "node": ">=4" @@ -1769,6 +1861,7 @@ "version": "3.5.3", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "dev": true, "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -1796,6 +1889,7 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, "funding": [ { "type": "github", @@ -1816,6 +1910,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1825,6 +1920,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1843,6 +1939,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, "license": "MIT", "engines": { "iojs": ">=1.0.0", @@ -1853,6 +1950,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", "integrity": 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, "funding": [ { "type": "github", @@ -1876,6 +1974,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, "license": "MIT" }, "node_modules/semver": { @@ -1894,6 +1993,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, "license": "ISC", "engines": { "node": ">=14" @@ -1906,6 +2006,7 @@ "version": "5.1.0", "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, "license": "MIT", "engines": { "node": ">=14.16" @@ -1918,6 +2019,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", @@ -1932,6 +2034,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" @@ -1944,6 +2047,7 @@ "version": "7.4.3", "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "dev": true, "license": "ISC", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -1961,6 +2065,7 @@ "version": "0.3.0", "resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.3.0.tgz", "integrity": "sha512-qNsTkX9K8hi+FHDfHmf22e/OGuXmfBm9RqNismxBrnSmZVJKegQ+HYYXT+R7Ha8F/YSm2Y34vmzD4cxMu2u95g==", + "dev": true, "license": "MIT", "dependencies": { "temporal-spec": "0.3.0" @@ -1970,12 +2075,14 @@ "version": "0.3.0", "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.3.0.tgz", "integrity": "sha512-n+noVpIqz4hYgFSMOSiINNOUOMFtV5cZQNCmmszA6GiVFVRt3G7AqVyhXjhCSmowvQn+NsGn+jMDMKJYHd3bSQ==", + "dev": true, "license": "ISC" }, "node_modules/tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, "license": "MIT", "dependencies": { "os-tmpdir": "~1.0.2" @@ -1988,6 +2095,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, "license": "MIT", "dependencies": { "is-number": "^7.0.0" @@ -2019,6 +2127,7 @@ "version": "0.21.3", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" @@ -2031,6 +2140,7 @@ "version": "0.3.0", "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", 
"integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -2052,6 +2162,7 @@ "version": "8.2.0", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "dev": true, "license": "MIT", "engines": { "node": ">=14.0.0" @@ -2061,6 +2172,7 @@ "version": "9.0.1", "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", + "dev": true, "license": "MIT", "dependencies": { "vscode-languageserver-protocol": "3.17.5" @@ -2073,6 +2185,7 @@ "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "dev": true, "license": "MIT", "dependencies": { "vscode-jsonrpc": "8.2.0", @@ -2083,18 +2196,21 @@ "version": "1.0.12", "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "dev": true, "license": "MIT" }, "node_modules/vscode-languageserver-types": { "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", + "dev": true, "license": "MIT" }, "node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", @@ -2130,6 +2246,7 @@ "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, "license": "ISC", "engines": { "node": ">=10" @@ -2139,6 +2256,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -2148,6 +2266,7 @@ "version": "2.8.0", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.0.tgz", "integrity": "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==", + "dev": true, "license": "ISC", "bin": { "yaml": "bin.mjs" @@ -2160,6 +2279,7 @@ "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, "license": "MIT", "dependencies": { "cliui": "^8.0.1", @@ -2178,6 +2298,7 @@ "version": "21.1.1", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, "license": "ISC", "engines": { 
"node": ">=12" @@ -2187,6 +2308,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz", "integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" diff --git a/eng/emitter-package.json b/eng/emitter-package.json index f64a57fca536..569833e55ede 100644 --- a/eng/emitter-package.json +++ b/eng/emitter-package.json @@ -1,23 +1,25 @@ { - "name": "dist/src/index.js", + "main": "dist/src/index.js", "dependencies": { - "@azure-tools/typespec-python": "0.46.0" + "@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.46.0-alpha.20250711.1.tgz" }, "devDependencies": { + "@azure-tools/typespec-autorest": "~0.57.0", + "@azure-tools/typespec-azure-core": "~0.57.0", + "@azure-tools/typespec-azure-resource-manager": "~0.57.0", + "@azure-tools/typespec-azure-rulesets": "~0.57.0", + "@azure-tools/typespec-client-generator-core": "~0.57.2", "@typespec/compiler": "^1.1.0", + "@typespec/events": "~0.71.0", "@typespec/http": "^1.1.0", - "@typespec/rest": "~0.71.0", - "@typespec/versioning": "~0.71.0", "@typespec/openapi": "^1.1.0", - "@typespec/events": "~0.71.0", + "@typespec/rest": "~0.71.0", "@typespec/sse": "~0.71.0", "@typespec/streams": "~0.71.0", - "@typespec/xml": "~0.71.0", - "@azure-tools/typespec-azure-core": "~0.57.0", - "@azure-tools/typespec-azure-resource-manager": "~0.57.2", - "@azure-tools/typespec-autorest": "~0.57.1", - "@azure-tools/typespec-azure-rulesets": "~0.57.1", - "@azure-tools/typespec-client-generator-core": "~0.57.3", - "@azure-tools/typespec-liftr-base": "0.8.0" + "@typespec/versioning": "~0.71.0", + "@typespec/xml": "~0.71.0" + }, + "overrides": { + "@autorest/python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@autorest/python/-/python-6.36.0-alpha.20250711.1.tgz" } -} \ No newline at end of file +} From e0bfb96b5f6ac5b7270c481468aa22d90ef07ff6 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Fri, 11 Jul 2025 07:39:58 +0000 Subject: [PATCH 2/2] Update SDK code ai_vi_0 --- .../azure/ai/agents/_client.py | 4 +- .../azure-ai-agents/azure/ai/agents/_types.py | 1 - .../azure/ai/agents/_utils/model_base.py | 2 +- .../azure/ai/agents/aio/_client.py | 4 +- .../ai/agents/aio/operations/__init__.py | 2 - .../ai/agents/aio/operations/_operations.py | 2 +- .../azure/ai/agents/operations/__init__.py | 2 - .../azure/ai/agents/operations/_operations.py | 6 +- .../sample_agents_deep_research_async.py | 7 +- .../sample_agents_deep_research.py | 9 +- ...sample_agents_multiple_connected_agents.py | 13 +- .../azure/ai/projects/_utils/model_base.py | 2 +- .../azure/ai/projects/_validation.py | 20 +- .../ai/projects/aio/operations/_operations.py | 11 +- .../azure/ai/projects/models/_models.py | 18 +- .../ai/projects/operations/_operations.py | 11 +- .../generated_tests/conftest.py | 35 + .../test_ai_project_connections_operations.py | 22 + ...ai_project_connections_operations_async.py | 23 + .../test_ai_project_datasets_operations.py | 105 + ...st_ai_project_datasets_operations_async.py | 106 + .../test_ai_project_deployments_operations.py | 33 + ...ai_project_deployments_operations_async.py | 34 + .../test_ai_project_evaluations_operations.py | 71 + ...ai_project_evaluations_operations_async.py | 72 + .../test_ai_project_indexes_operations.py 
| 87 + ...est_ai_project_indexes_operations_async.py | 88 + .../test_ai_project_red_teams_operations.py | 56 + ...t_ai_project_red_teams_operations_async.py | 57 + .../generated_tests/testpreparer.py | 26 + .../generated_tests/testpreparer_async.py | 20 + sdk/batch/azure-batch/MANIFEST.in | 3 +- sdk/batch/azure-batch/_metadata.json | 3 + sdk/batch/azure-batch/apiview-properties.json | 363 + sdk/batch/azure-batch/azure/batch/__init__.py | 33 +- .../azure/batch/_operations/_patch.py | 55 +- sdk/batch/azure-batch/azure/batch/_patch.py | 2 +- .../azure/batch/aio/_operations/_patch.py | 57 +- .../azure-batch/azure/batch/aio/_patch.py | 2 +- .../azure/batch/models/__init__.py | 2 - .../azure-batch/azure/batch/models/_models.py | 383 +- .../azure-batch/azure/batch/models/_patch.py | 6 +- sdk/batch/azure-batch/client/__init__.py | 32 + sdk/batch/azure-batch/client/_client.py | 101 + .../azure-batch/client/_configuration.py | 65 + .../client/_operations/__init__.py | 22 + .../client/_operations/_operations.py | 11483 ++++++++++++++++ .../azure-batch/client/_operations/_patch.py | 21 + sdk/batch/azure-batch/client/_patch.py | 21 + .../azure-batch/client/_utils/__init__.py | 6 + .../azure-batch/client/_utils/model_base.py | 1232 ++ .../client/_utils/serialization.py | 2032 +++ sdk/batch/azure-batch/client/_utils/utils.py | 57 + sdk/batch/azure-batch/client/_version.py | 9 + sdk/batch/azure-batch/client/aio/__init__.py | 29 + sdk/batch/azure-batch/client/aio/_client.py | 103 + .../azure-batch/client/aio/_configuration.py | 65 + .../client/aio/_operations/__init__.py | 22 + .../client/aio/_operations/_operations.py | 8307 +++++++++++ .../client/aio/_operations/_patch.py | 21 + sdk/batch/azure-batch/client/aio/_patch.py | 21 + sdk/batch/azure-batch/client/py.typed | 1 + sdk/batch/azure-batch/setup.py | 16 +- sdk/batch/azure-batch/tests/test_batch.py | 4 +- .../radiologyinsights/_client.py | 4 +- .../radiologyinsights/_operations/__init__.py | 5 +- .../_operations/_operations.py | 2 +- .../radiologyinsights/_utils/model_base.py | 2 +- .../radiologyinsights/aio/_client.py | 4 +- .../aio/_operations/__init__.py | 5 +- .../aio/_operations/_operations.py | 2 +- .../generated_tests/conftest.py | 39 + .../test_radiology_insights.py | 474 + .../test_radiology_insights_async.py | 486 + .../generated_tests/testpreparer.py | 26 + .../generated_tests/testpreparer_async.py | 20 + .../setup.py | 5 +- .../azure-keyvault-administration/MANIFEST.in | 3 +- .../_metadata.json | 3 + .../apiview-properties.json | 70 + .../azure/__init__.py | 4 - .../azure/keyvault/__init__.py | 4 - .../azure/keyvault/administration/__init__.py | 38 +- .../administration/_access_control_client.py | 38 +- .../keyvault/administration/_backup_client.py | 28 +- .../administration/_generated/_client.py | 5 +- .../administration/_generated/_validation.py | 20 +- .../administration/_generated/aio/_client.py | 5 +- .../_generated/aio/operations/__init__.py | 2 - .../_generated/aio/operations/_operations.py | 28 +- .../_generated/operations/__init__.py | 2 - .../_generated/operations/_operations.py | 28 +- .../_internal/async_challenge_auth_policy.py | 10 +- .../_internal/async_client_base.py | 8 +- .../administration/_internal/client_base.py | 2 +- .../_internal/http_challenge.py | 8 +- .../azure/keyvault/administration/_models.py | 16 +- .../administration/_settings_client.py | 7 +- .../aio/_access_control_client.py | 38 +- .../administration/aio/_backup_client.py | 28 +- .../administration/aio/_settings_client.py | 7 +- 
.../samples/access_control_operations.py | 6 +- .../access_control_operations_async.py | 9 +- .../samples/backup_restore_operations.py | 2 +- .../backup_restore_operations_async.py | 5 +- .../samples/settings_operations.py | 3 +- .../samples/settings_operations_async.py | 4 +- .../azure-keyvault-administration/setup.py | 15 +- .../tests/_async_test_case.py | 10 +- .../tests/_test_case.py | 8 +- .../tests/conftest.py | 17 +- .../perfstress_tests/get_role_definition.py | 8 +- .../tests/test_access_control.py | 14 +- .../tests/test_access_control_async.py | 19 +- .../tests/test_backup_client.py | 8 +- .../tests/test_backup_client_async.py | 15 +- .../tests/test_examples_administration.py | 5 +- .../test_examples_administration_async.py | 3 +- .../azure-keyvault-certificates/MANIFEST.in | 3 +- .../_metadata.json | 3 + .../apiview-properties.json | 98 + .../azure/__init__.py | 4 - .../azure/keyvault/__init__.py | 4 - .../azure/keyvault/certificates/__init__.py | 57 +- .../azure/keyvault/certificates/_client.py | 90 +- .../certificates/_generated/_client.py | 4 +- .../_generated/_operations/__init__.py | 5 +- .../_generated/_operations/_operations.py | 12 +- .../certificates/_generated/aio/_client.py | 4 +- .../_generated/aio/_operations/__init__.py | 5 +- .../_generated/aio/_operations/_operations.py | 12 +- .../azure/keyvault/certificates/_models.py | 3 +- .../_shared/async_challenge_auth_policy.py | 10 +- .../keyvault/certificates/aio/_client.py | 99 +- .../samples/backup_restore_operations.py | 1 + .../backup_restore_operations_async.py | 1 + .../samples/contacts.py | 1 + .../samples/contacts_async.py | 1 + .../samples/hello_world.py | 1 + .../samples/hello_world_async.py | 1 + .../samples/import_certificate.py | 1 + .../samples/import_certificate_async.py | 1 + .../samples/issuers.py | 1 + .../samples/issuers_async.py | 1 + .../samples/list_operations.py | 1 + .../samples/list_operations_async.py | 1 + .../samples/parse_certificate.py | 1 + .../samples/parse_certificate_async.py | 1 + .../samples/recover_purge_operations.py | 1 + .../samples/recover_purge_operations_async.py | 1 + .../azure-keyvault-certificates/setup.py | 19 +- .../tests/certs.py | 1 + .../tests/test_certificates_client.py | 5 +- .../tests/test_certificates_client_async.py | 1 + sdk/keyvault/azure-keyvault-keys/MANIFEST.in | 3 +- .../azure-keyvault-keys/_metadata.json | 3 + .../apiview-properties.json | 92 + .../azure-keyvault-keys/azure/__init__.py | 4 - .../azure/keyvault/__init__.py | 4 - .../azure/keyvault/keys/__init__.py | 44 +- .../azure/keyvault/keys/_client.py | 34 +- .../azure/keyvault/keys/_generated/_client.py | 4 +- .../keys/_generated/_operations/__init__.py | 5 +- .../_generated/_operations/_operations.py | 15 +- .../keyvault/keys/_generated/_validation.py | 20 +- .../keyvault/keys/_generated/aio/_client.py | 4 +- .../_generated/aio/_operations/__init__.py | 5 +- .../_generated/aio/_operations/_operations.py | 15 +- .../azure/keyvault/keys/_models.py | 8 +- .../_shared/async_challenge_auth_policy.py | 10 +- .../azure/keyvault/keys/aio/_client.py | 21 +- .../azure/keyvault/keys/crypto/_client.py | 36 +- .../azure/keyvault/keys/crypto/_enums.py | 1 + .../keyvault/keys/crypto/_internal/rsa_key.py | 9 +- .../azure/keyvault/keys/crypto/_models.py | 1 + .../azure/keyvault/keys/crypto/aio/_client.py | 14 +- .../samples/backup_restore_operations.py | 3 +- .../backup_restore_operations_async.py | 4 +- .../samples/hello_world.py | 7 +- .../samples/hello_world_async.py | 2 + .../samples/key_rotation.py | 3 +- 
.../samples/key_rotation_async.py | 4 +- .../samples/list_operations.py | 3 +- .../samples/list_operations_async.py | 2 + .../samples/recover_purge_operations.py | 3 +- .../samples/recover_purge_operations_async.py | 2 + .../samples/send_request.py | 5 +- sdk/keyvault/azure-keyvault-keys/setup.py | 68 + .../tests/_keys_test_case.py | 6 +- .../tests/_shared/test_case.py | 2 - .../tests/_shared/test_case_async.py | 2 +- .../azure-keyvault-keys/tests/conftest.py | 31 +- .../azure-keyvault-keys/tests/keys.py | 1 + .../tests/perfstress_tests/sign.py | 1 + .../tests/test_challenge_auth.py | 18 +- .../tests/test_challenge_auth_async.py | 24 +- .../tests/test_crypto_client.py | 87 +- .../tests/test_crypto_client_async.py | 76 +- .../tests/test_examples_crypto.py | 5 +- .../tests/test_examples_crypto_async.py | 10 +- .../tests/test_key_client.py | 106 +- .../tests/test_keys_async.py | 132 +- .../tests/test_local_crypto.py | 16 +- .../tests/test_parse_id.py | 2 +- .../tests/test_samples_keys.py | 12 +- .../tests/test_samples_keys_async.py | 12 +- .../azure-keyvault-secrets/MANIFEST.in | 3 +- .../azure-keyvault-secrets/_metadata.json | 3 + .../apiview-properties.json | 41 + .../azure-keyvault-secrets/azure/__init__.py | 5 - .../azure/keyvault/__init__.py | 5 - .../azure/keyvault/secrets/__init__.py | 20 +- .../azure/keyvault/secrets/_client.py | 45 +- .../keyvault/secrets/_generated/_client.py | 4 +- .../_generated/_operations/__init__.py | 5 +- .../_generated/_operations/_operations.py | 10 +- .../secrets/_generated/aio/_client.py | 4 +- .../_generated/aio/_operations/__init__.py | 5 +- .../_generated/aio/_operations/_operations.py | 10 +- .../_shared/async_challenge_auth_policy.py | 10 +- .../azure/keyvault/secrets/aio/_client.py | 29 +- .../samples/backup_restore_operations.py | 1 + .../backup_restore_operations_async.py | 4 +- .../samples/hello_world.py | 1 + .../samples/hello_world_async.py | 2 + .../samples/list_operations.py | 13 +- .../samples/list_operations_async.py | 10 +- .../samples/recover_purge_operations.py | 1 + .../samples/recover_purge_operations_async.py | 2 + sdk/keyvault/azure-keyvault-secrets/setup.py | 19 +- .../azure-keyvault-secrets/tests/conftest.py | 2 +- .../tests/test_polling_method.py | 8 +- .../tests/test_secrets_async.py | 1 + .../tests/test_secrets_client.py | 1 + .../azure-keyvault-securitydomain/MANIFEST.in | 4 +- .../_metadata.json | 3 + .../apiview-properties.json | 28 +- .../securitydomain/_internal/async_polling.py | 1 + .../securitydomain/_internal/polling.py | 1 + .../clientcustomizations/__init__.py | 32 + .../clientcustomizations/_client.py | 100 + .../clientcustomizations/_configuration.py | 63 + .../_operations/__init__.py | 22 + .../_operations/_operations.py | 634 + .../_operations/_patch.py | 21 + .../clientcustomizations/_patch.py | 21 + .../clientcustomizations/_utils/__init__.py | 6 + .../clientcustomizations/_utils/model_base.py | 1232 ++ .../_utils/serialization.py | 2032 +++ .../clientcustomizations/_utils/utils.py | 25 + .../clientcustomizations/_version.py | 9 + .../clientcustomizations/aio/__init__.py | 29 + .../clientcustomizations/aio/_client.py | 102 + .../aio/_configuration.py | 63 + .../aio/_operations/__init__.py | 22 + .../aio/_operations/_operations.py | 539 + .../aio/_operations/_patch.py | 21 + .../clientcustomizations/aio/_patch.py | 21 + .../clientcustomizations/models/__init__.py | 29 + .../clientcustomizations/models/_models.py | 44 + .../clientcustomizations/models/_patch.py | 21 + .../clientcustomizations/py.typed | 1 + 
.../keyvault/__init__.py | 1 + .../keyvault/models/__init__.py | 40 + .../keyvault/models/_enums.py | 21 + .../keyvault/models/_models.py | 230 + .../keyvault/models/_patch.py | 21 + .../azure-keyvault-securitydomain/setup.py | 13 +- .../azure-developer-loadtesting/MANIFEST.in | 4 +- .../_metadata.json | 3 + .../apiview-properties.json | 133 + .../customizations/__init__.py | 34 + .../customizations/_client.py | 176 + .../customizations/_configuration.py | 112 + .../customizations/_operations/__init__.py | 22 + .../customizations/_operations/_operations.py | 4505 ++++++ .../customizations/_operations/_patch.py | 21 + .../customizations/_patch.py | 21 + .../customizations/_utils/__init__.py | 6 + .../customizations/_utils/model_base.py | 1232 ++ .../customizations/_utils/serialization.py | 2032 +++ .../customizations/_utils/utils.py | 25 + .../customizations/_validation.py | 66 + .../customizations/_version.py | 9 + .../customizations/aio/__init__.py | 31 + .../customizations/aio/_client.py | 180 + .../customizations/aio/_configuration.py | 112 + .../aio/_operations/__init__.py | 22 + .../aio/_operations/_operations.py | 3492 +++++ .../customizations/aio/_operations/_patch.py | 21 + .../customizations/aio/_patch.py | 21 + .../customizations/py.typed | 1 + .../create_or_update_test.py | 86 + .../create_or_update_test_app_components.py | 52 + .../create_or_update_test_profile.py | 55 + ...reate_or_update_test_run_app_components.py | 52 + ...r_update_test_run_server_metrics_config.py | 55 + ...te_or_update_test_server_metrics_config.py | 55 + .../generated_samples/delete_test.py | 40 + .../generated_samples/delete_test_file.py | 41 + .../generated_samples/delete_test_profile.py | 40 + .../delete_test_profile_run.py | 40 + .../generated_samples/delete_test_run.py | 40 + .../generated_samples/get_test.py | 41 + .../generated_samples/get_test_file.py | 42 + .../generated_samples/get_test_profile.py | 41 + .../get_test_profile_run_executed.py | 41 + .../get_test_profile_run_executing.py | 41 + .../generated_samples/get_test_run.py | 41 + .../generated_samples/get_test_run_file.py | 42 + .../list_metric_dimension_values.py | 46 + .../list_test_app_components.py | 41 + .../generated_samples/list_test_files.py | 42 + .../list_test_profile_runs.py | 40 + .../generated_samples/list_test_profiles.py | 40 + .../list_test_run_app_components.py | 41 + .../list_test_run_metrics.py | 45 + .../list_test_run_metrics_definitions.py | 42 + .../list_test_run_metrics_namespaces.py | 41 + .../list_test_run_server_metrics_config.py | 41 + .../generated_samples/list_test_runs.py | 40 + .../list_test_server_metrics_config.py | 41 + .../generated_samples/list_tests.py | 40 + .../stop_test_profile_run.py | 41 + .../generated_samples/stop_test_run.py | 41 + .../generated_tests/conftest.py | 56 + .../test_load_test_administration.py | 341 + .../test_load_test_administration_async.py | 342 + .../generated_tests/test_load_test_run.py | 242 + .../test_load_test_run_async.py | 243 + .../generated_tests/testpreparer.py | 44 + .../generated_tests/testpreparer_async.py | 31 + .../microsoft/__init__.py | 1 + .../microsoft/loadtestservice/__init__.py | 1 + .../loadtestservice/models/__init__.py | 156 + .../loadtestservice/models/_enums.py | 317 + .../loadtestservice/models/_models.py | 2605 ++++ .../loadtestservice/models/_patch.py | 21 + .../create_or_update_app_components_test.py | 1 + .../samples/create_or_update_test.py | 1 + .../samples/create_or_update_test_profile.py | 7 +- .../samples/upload_test_file.py | 1 + 
.../azure-developer-loadtesting/setup.py | 17 +- ...test_async_load_test_administration_ops.py | 60 +- .../tests/test_async_load_test_run_ops.py | 69 +- .../test_load_test_administration_ops.py | 63 +- .../tests/test_load_test_run_ops.py | 79 +- .../tests/testcase.py | 2 +- .../tests/testcase_async.py | 6 +- .../azure/onlineexperimentation/_client.py | 4 +- .../_operations/__init__.py | 5 +- .../_operations/_operations.py | 3 +- .../_utils/model_base.py | 2 +- .../onlineexperimentation/aio/_client.py | 4 +- .../aio/_operations/__init__.py | 5 +- .../aio/_operations/_operations.py | 2 +- .../azure-onlineexperimentation/setup.py | 5 +- .../azure-schemaregistry/_metadata.json | 3 + .../azure/schemaregistry/__init__.py | 1 + .../azure/schemaregistry/_client.py | 4 +- .../schemaregistry/_operations/__init__.py | 5 +- .../schemaregistry/_operations/_operations.py | 13 +- .../azure/schemaregistry/aio/__init__.py | 2 + .../azure/schemaregistry/aio/_client.py | 4 +- .../aio/_operations/__init__.py | 5 +- .../aio/_operations/_operations.py | 13 +- .../generated_tests/conftest.py | 39 + .../generated_tests/testpreparer.py | 26 + .../generated_tests/testpreparer_async.py | 20 + 369 files changed, 51274 insertions(+), 1672 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/generated_tests/conftest.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations_async.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/testpreparer.py create mode 100644 sdk/ai/azure-ai-projects/generated_tests/testpreparer_async.py create mode 100644 sdk/batch/azure-batch/_metadata.json create mode 100644 sdk/batch/azure-batch/apiview-properties.json create mode 100644 sdk/batch/azure-batch/client/__init__.py create mode 100644 sdk/batch/azure-batch/client/_client.py create mode 100644 sdk/batch/azure-batch/client/_configuration.py create mode 100644 sdk/batch/azure-batch/client/_operations/__init__.py create mode 100644 sdk/batch/azure-batch/client/_operations/_operations.py create mode 100644 sdk/batch/azure-batch/client/_operations/_patch.py create mode 100644 sdk/batch/azure-batch/client/_patch.py create mode 100644 sdk/batch/azure-batch/client/_utils/__init__.py create mode 100644 sdk/batch/azure-batch/client/_utils/model_base.py create mode 100644 sdk/batch/azure-batch/client/_utils/serialization.py create mode 100644 
sdk/batch/azure-batch/client/_utils/utils.py create mode 100644 sdk/batch/azure-batch/client/_version.py create mode 100644 sdk/batch/azure-batch/client/aio/__init__.py create mode 100644 sdk/batch/azure-batch/client/aio/_client.py create mode 100644 sdk/batch/azure-batch/client/aio/_configuration.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/__init__.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/_operations.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/_patch.py create mode 100644 sdk/batch/azure-batch/client/aio/_patch.py create mode 100644 sdk/batch/azure-batch/client/py.typed create mode 100644 sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/conftest.py create mode 100644 sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights.py create mode 100644 sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights_async.py create mode 100644 sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer.py create mode 100644 sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer_async.py create mode 100644 sdk/keyvault/azure-keyvault-administration/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-administration/apiview-properties.json create mode 100644 sdk/keyvault/azure-keyvault-certificates/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-certificates/apiview-properties.json create mode 100644 sdk/keyvault/azure-keyvault-keys/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-keys/apiview-properties.json create mode 100644 sdk/keyvault/azure-keyvault-keys/setup.py create mode 100644 sdk/keyvault/azure-keyvault-secrets/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-secrets/apiview-properties.json create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/_metadata.json create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_client.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_configuration.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_operations.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_patch.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_patch.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/model_base.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/serialization.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/utils.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_version.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_client.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_configuration.py create mode 100644 
sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_operations.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_patch.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_patch.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_models.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_patch.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/py.typed create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/keyvault/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/__init__.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_enums.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_models.py create mode 100644 sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/_metadata.json create mode 100644 sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/model_base.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/serialization.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/utils.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py create 
mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py 
create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py create mode 100644 sdk/schemaregistry/azure-schemaregistry/_metadata.json create mode 100644 sdk/schemaregistry/azure-schemaregistry/generated_tests/conftest.py create mode 100644 sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer.py create mode 100644 sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer_async.py diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/_client.py b/sdk/ai/azure-ai-agents/azure/ai/agents/_client.py index de44a3b5aec4..2bd28aaad5c4 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/_client.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/_client.py @@ -17,7 +17,6 @@ from ._configuration import AgentsClientConfiguration from ._utils.serialization import Deserializer, Serializer from .operations import ( - AgentsClientOperationsMixin, FilesOperations, MessagesOperations, RunStepsOperations, @@ -27,12 +26,13 @@ VectorStoreFilesOperations, VectorStoresOperations, ) +from .operations._operations import _AgentsClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class AgentsClient(AgentsClientOperationsMixin): # pylint: disable=too-many-instance-attributes +class AgentsClient(_AgentsClientOperationsMixin): # pylint: disable=too-many-instance-attributes """AgentsClient. :ivar threads: ThreadsOperations operations diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/_types.py b/sdk/ai/azure-ai-agents/azure/ai/agents/_types.py index 9b16b6083887..8ff24fb65ae5 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/_types.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/_types.py @@ -1,4 +1,3 @@ -# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
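Editorial note on the _client.py hunk above: the generated operations mixin is renamed to a private _AgentsClientOperationsMixin, imported straight from operations._operations, and (in the operations/__init__.py hunks further on in this patch) dropped from the package exports, so client-level operations stay reachable only through AgentsClient itself. A minimal sketch of the pattern, with hypothetical class and method names standing in for the generated ones:

# Illustrative only: _WidgetOperationsMixin and get_widget are made-up names.
class _WidgetOperationsMixin:
    """Private by convention: holds operation methods, never exported."""

    def get_widget(self, name: str) -> str:
        return f"widget:{name}"


class WidgetClient(_WidgetOperationsMixin):
    """Public client; the only supported way to reach the mixin's methods."""


client = WidgetClient()
print(client.get_widget("demo"))  # callers go through the client, not the mixin

Code that imported AgentsClientOperationsMixin from azure.ai.agents.operations directly will no longer find it; the same operations remain available on AgentsClient.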
diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/_utils/model_base.py b/sdk/ai/azure-ai-agents/azure/ai/agents/_utils/model_base.py index aaa6692b2346..49d5c7259389 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/_utils/model_base.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/_client.py b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/_client.py index dede9d72b755..4492463b44fb 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/_client.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/_client.py @@ -17,7 +17,6 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import AgentsClientConfiguration from .operations import ( - AgentsClientOperationsMixin, FilesOperations, MessagesOperations, RunStepsOperations, @@ -27,12 +26,13 @@ VectorStoreFilesOperations, VectorStoresOperations, ) +from .operations._operations import _AgentsClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class AgentsClient(AgentsClientOperationsMixin): # pylint: disable=too-many-instance-attributes +class AgentsClient(_AgentsClientOperationsMixin): # pylint: disable=too-many-instance-attributes """AgentsClient. :ivar threads: ThreadsOperations operations diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/__init__.py b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/__init__.py index 68b814db1f6f..b323c1c3c268 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/__init__.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/__init__.py @@ -20,7 +20,6 @@ from ._operations import VectorStoresOperations # type: ignore from ._operations import VectorStoreFilesOperations # type: ignore from ._operations import VectorStoreFileBatchesOperations # type: ignore -from ._operations import AgentsClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -35,7 +34,6 @@ "VectorStoresOperations", "VectorStoreFilesOperations", "VectorStoreFileBatchesOperations", - "AgentsClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/_operations.py b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/_operations.py index 8a6ebbb9015d..5d5850367c9a 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/aio/operations/_operations.py @@ -3975,7 +3975,7 @@ async def get_next(_continuation_token=None): return AsyncItemPaged(get_next, extract_data) -class AgentsClientOperationsMixin( +class _AgentsClientOperationsMixin( ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], AgentsClientConfiguration] ): diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/operations/__init__.py b/sdk/ai/azure-ai-agents/azure/ai/agents/operations/__init__.py index 68b814db1f6f..b323c1c3c268 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/operations/__init__.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/operations/__init__.py @@ -20,7 +20,6 @@ from ._operations import VectorStoresOperations # type: ignore from ._operations import 
VectorStoreFilesOperations # type: ignore from ._operations import VectorStoreFileBatchesOperations # type: ignore -from ._operations import AgentsClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -35,7 +34,6 @@ "VectorStoresOperations", "VectorStoreFilesOperations", "VectorStoreFileBatchesOperations", - "AgentsClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/ai/azure-ai-agents/azure/ai/agents/operations/_operations.py b/sdk/ai/azure-ai-agents/azure/ai/agents/operations/_operations.py index cbc687d3e037..718fd04ead51 100644 --- a/sdk/ai/azure-ai-agents/azure/ai/agents/operations/_operations.py +++ b/sdk/ai/azure-ai-agents/azure/ai/agents/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5087,7 +5087,9 @@ def get_next(_continuation_token=None): return ItemPaged(get_next, extract_data) -class AgentsClientOperationsMixin(ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], AgentsClientConfiguration]): +class _AgentsClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], AgentsClientConfiguration] +): @overload def create_agent( diff --git a/sdk/ai/azure-ai-agents/samples/agents_async/sample_agents_deep_research_async.py b/sdk/ai/azure-ai-agents/samples/agents_async/sample_agents_deep_research_async.py index 200232968288..f9597e216169 100644 --- a/sdk/ai/azure-ai-agents/samples/agents_async/sample_agents_deep_research_async.py +++ b/sdk/ai/azure-ai-agents/samples/agents_async/sample_agents_deep_research_async.py @@ -64,10 +64,7 @@ async def fetch_and_print_new_agent_response( return response.id -def create_research_summary( - message : ThreadMessage, - filepath: str = "research_summary.md" -) -> None: +def create_research_summary(message: ThreadMessage, filepath: str = "research_summary.md") -> None: if not message: print("No message content provided, cannot create research summary.") return @@ -167,4 +164,4 @@ async def main() -> None: if __name__ == "__main__": - asyncio.run(main()) \ No newline at end of file + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_deep_research.py b/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_deep_research.py index 7900158dcba3..c8d7b178ef78 100644 --- a/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_deep_research.py +++ b/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_deep_research.py @@ -60,10 +60,7 @@ def fetch_and_print_new_agent_response( return response.id -def create_research_summary( - message : ThreadMessage, - filepath: str = "research_summary.md" -) -> None: +def create_research_summary(message: ThreadMessage, filepath: str = "research_summary.md") -> None: if not message: print("No message content provided, cannot create research summary.") return @@ -154,9 +151,7 @@ def create_research_summary( print(f"Run failed: {run.last_error}") # Fetch the final message from the agent in the thread and create a research summary - final_message = agents_client.messages.get_last_message_by_role( - thread_id=thread.id, role=MessageRole.AGENT - ) + final_message = agents_client.messages.get_last_message_by_role(thread_id=thread.id, role=MessageRole.AGENT) if final_message: 
create_research_summary(final_message) diff --git a/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_multiple_connected_agents.py b/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_multiple_connected_agents.py index 1e66d6dedadb..13dc46a2b746 100644 --- a/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_multiple_connected_agents.py +++ b/sdk/ai/azure-ai-agents/samples/agents_tools/sample_agents_multiple_connected_agents.py @@ -35,7 +35,6 @@ ListSortOrder, MessageRole, RunStepToolCallDetails, - ) from azure.identity import DefaultAzureCredential @@ -146,14 +145,16 @@ # Delete the connected Agent when done agents_client.delete_agent(weather_agent.id) print("Deleted weather agent") - + # [START list_tool_calls] for run_step in agents_client.run_steps.list(thread_id=thread.id, run_id=run.id, order=ListSortOrder.ASCENDING): if isinstance(run_step.step_details, RunStepToolCallDetails): for tool_call in run_step.step_details.tool_calls: - print(f"\tAgent: {tool_call._data['connected_agent']['name']} " - f"query: {tool_call._data['connected_agent']['arguments']} ", - f"output: {tool_call._data['connected_agent']['output']}") + print( + f"\tAgent: {tool_call._data['connected_agent']['name']} " + f"query: {tool_call._data['connected_agent']['arguments']} ", + f"output: {tool_call._data['connected_agent']['output']}", + ) # [END list_tool_calls] # [START list_messages] @@ -162,7 +163,7 @@ for msg in messages: if msg.text_messages: last_text = msg.text_messages[-1] - text = last_text.text.value.replace('\u3010', '[').replace('\u3011', ']') + text = last_text.text.value.replace("\u3010", "[").replace("\u3011", "]") print(f"{msg.role}: {text}") # [END list_messages] diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py index aaa6692b2346..49d5c7259389 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_validation.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_validation.py index 752b2822f9d3..f5af3a4eb8a2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_validation.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_validation.py @@ -10,6 +10,22 @@ def api_version_validation(**kwargs): params_added_on = kwargs.pop("params_added_on", {}) method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. 
+ :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default def decorator(func): @functools.wraps(func) @@ -21,7 +37,7 @@ def wrapper(*args, **kwargs): except AttributeError: return func(*args, **kwargs) - if method_added_on > client_api_version: + if _index_with_default(method_added_on) > _index_with_default(client_api_version): raise ValueError( f"'{func.__name__}' is not available in API version " f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." @@ -31,7 +47,7 @@ def wrapper(*args, **kwargs): parameter: api_version for api_version, parameters in params_added_on.items() for parameter in parameters - if parameter in kwargs and api_version > client_api_version + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) } if unsupported: raise ValueError( diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index 71fcc6e72e5f..d02b23793e13 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -333,6 +333,7 @@ def __init__(self, *args, **kwargs) -> None: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) async def get(self, name: str, **kwargs: Any) -> _models.Evaluation: """Get an evaluation run by name. @@ -402,6 +403,7 @@ async def get(self, name: str, **kwargs: Any) -> _models.Evaluation: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Evaluation"]: """List evaluation runs. @@ -536,6 +538,7 @@ async def create( @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) async def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwargs: Any) -> _models.Evaluation: """Creates an evaluation run. 
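Editorial note on the _validation.py hunks above: the decorator now compares API versions by their position in the new api_versions_list instead of comparing the version strings lexicographically, with _index_with_default returning -1 for versions missing from the list. A short sketch of why that matters; the helper below mirrors the generated one but is reshaped to take the list as a parameter, and the second version string is hypothetical:

def _index_with_default(api_versions_list, value, default=-1):
    try:
        return api_versions_list.index(value)
    except ValueError:
        return default

# Ordering is declared by the service, oldest first.
versions = ["2025-05-15-preview", "2025-11-01"]

# String comparison can misorder pre-release tags: the "-preview" suffix
# makes the dated preview compare greater than its same-date stable form.
assert "2025-05-15-preview" > "2025-05-15"

# Positional comparison follows the declared list, and versions absent
# from the list fall back to the default index instead of raising.
assert _index_with_default(versions, "2025-05-15-preview") < _index_with_default(versions, "2025-11-01")
assert _index_with_default(versions, "2024-01-01") == -1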
@@ -658,6 +661,7 @@ async def create_agent_evaluation( @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) async def create_agent_evaluation( self, evaluation: Union[_models.AgentEvaluationRequest, JSON, IO[bytes]], **kwargs: Any @@ -1022,7 +1026,7 @@ async def delete(self, name: str, version: str, **kwargs: Any) -> None: response = pipeline_response.http_response - if response.status_code not in [204, 200]: + if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) @@ -2063,6 +2067,7 @@ def __init__(self, *args, **kwargs) -> None: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: """Get a redteam by name. @@ -2132,6 +2137,7 @@ async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def list(self, **kwargs: Any) -> AsyncItemPaged["_models.RedTeam"]: """List a redteam by name. @@ -2264,6 +2270,7 @@ async def create( @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: """Creates a redteam run. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 0d3fe4140e7b..8366c5b27325 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -559,7 +559,8 @@ class BlobReference(_Model): """Blob reference details. :ivar blob_uri: Blob URI path for client to upload data. Example: - ``https://blob.windows.core.net/Container/Path``. Required. + `https://blob.windows.core.net/Container/Path `_. + Required. :vartype blob_uri: str :ivar storage_account_arm_id: ARM ID of the storage account to use. Required. :vartype storage_account_arm_id: str @@ -568,7 +569,8 @@ class BlobReference(_Model): """ blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"]) - """Blob URI path for client to upload data. Example: ``https://blob.windows.core.net/Container/Path``. Required.""" + """Blob URI path for client to upload data. Example: `https://blob.windows.core.net/Container/Path + `_. Required.""" storage_account_arm_id: str = rest_field( name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"] ) @@ -738,7 +740,8 @@ class DatasetVersion(_Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: FileDatasetVersion, FolderDatasetVersion - :ivar data_uri: URI of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. :vartype data_uri: str :ivar type: Dataset type. Required. 
Known values are: "uri_file" and "uri_folder". :vartype type: str or ~azure.ai.projects.models.DatasetType @@ -763,7 +766,8 @@ class DatasetVersion(_Model): __mapping__: Dict[str, _Model] = {} data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) - """URI of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required.""" + """URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required.""" type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) @@ -1071,7 +1075,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): """FileDatasetVersion Definition. - :ivar data_uri: URI of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. :vartype data_uri: str :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset manages storage itself. If true, the underlying data will not be deleted when the dataset @@ -1121,7 +1126,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): """FileDatasetVersion Definition. - :ivar data_uri: URI of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. :vartype data_uri: str :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset manages storage itself. If true, the underlying data will not be deleted when the dataset diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 0992676f7796..b0657bb36870 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -889,6 +889,7 @@ def __init__(self, *args, **kwargs) -> None: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def get(self, name: str, **kwargs: Any) -> _models.Evaluation: """Get an evaluation run by name. @@ -958,6 +959,7 @@ def get(self, name: str, **kwargs: Any) -> _models.Evaluation: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def list(self, **kwargs: Any) -> ItemPaged["_models.Evaluation"]: """List evaluation runs. 
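Editorial note on a behavioral change that is easy to miss in these operations files: the versioned delete calls (the async hunk at @@ -1022 earlier, and the sync hunk at @@ -1576 just below) now accept only HTTP 204, where a 200 response was previously also treated as success. A hedged sketch of the caller-visible effect, shown against the datasets operation group whose generated tests call the same delete(name=..., version=...) signature; the endpoint and resource names are placeholders:

from azure.ai.projects import AIProjectClient
from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential

client = AIProjectClient(
    endpoint="https://example.services.ai.azure.com/api/projects/demo",  # placeholder
    credential=DefaultAzureCredential(),
)
try:
    client.datasets.delete(name="my-dataset", version="1")  # placeholder resource
except HttpResponseError as exc:
    # After this change any non-204 status raises, including a 200 that
    # older generations would have accepted as success.
    print(f"delete failed with status {exc.status_code}")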
@@ -1090,6 +1092,7 @@ def create( @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwargs: Any) -> _models.Evaluation: """Creates an evaluation run. @@ -1212,6 +1215,7 @@ def create_agent_evaluation( @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def create_agent_evaluation( self, evaluation: Union[_models.AgentEvaluationRequest, JSON, IO[bytes]], **kwargs: Any @@ -1576,7 +1580,7 @@ def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: dis response = pipeline_response.http_response - if response.status_code not in [204, 200]: + if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) @@ -2617,6 +2621,7 @@ def __init__(self, *args, **kwargs) -> None: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def get(self, name: str, **kwargs: Any) -> _models.RedTeam: """Get a redteam by name. @@ -2686,6 +2691,7 @@ def get(self, name: str, **kwargs: Any) -> _models.RedTeam: @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def list(self, **kwargs: Any) -> ItemPaged["_models.RedTeam"]: """List a redteam by name. @@ -2816,6 +2822,7 @@ def create(self, red_team: IO[bytes], *, content_type: str = "application/json", @api_version_validation( method_added_on="2025-05-15-preview", params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + api_versions_list=["2025-05-15-preview"], ) def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: """Creates a redteam run. diff --git a/sdk/ai/azure-ai-projects/generated_tests/conftest.py b/sdk/ai/azure-ai-projects/generated_tests/conftest.py new file mode 100644 index 000000000000..dd8e527abab1 --- /dev/null +++ b/sdk/ai/azure-ai-projects/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + aiproject_subscription_id = os.environ.get("AIPROJECT_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + aiproject_tenant_id = os.environ.get("AIPROJECT_TENANT_ID", "00000000-0000-0000-0000-000000000000") + aiproject_client_id = os.environ.get("AIPROJECT_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + aiproject_client_secret = os.environ.get("AIPROJECT_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py new file mode 100644 index 000000000000..d93e0e240cca --- /dev/null +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectConnectionsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_connections_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.connections.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py new file mode 100644 index 000000000000..cc08499be0ee --- /dev/null +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectConnectionsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_connections_list(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.connections.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py new file mode 100644 index 000000000000..4df6701f60ef --- /dev/null +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectDatasetsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_list_versions(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.list_versions( + name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_get(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.get( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_delete(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.delete( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
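# Editorial aside on generated_tests/conftest.py earlier in this patch: its
# session-scoped add_sanitizers fixture scrubs subscription, tenant, and
# client identifiers from recordings before they are written to disk. A
# project-specific secret can be scrubbed the same way; a minimal sketch,
# where AIPROJECT_CONNECTION_STRING is a hypothetical variable and the
# helper is the same one the generated conftest imports.
import os

from devtools_testutils import add_general_regex_sanitizer


def add_custom_sanitizers():
    connection_string = os.environ.get("AIPROJECT_CONNECTION_STRING", "sanitized")
    add_general_regex_sanitizer(regex=connection_string, value="sanitized")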
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_datasets_create_or_update(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.datasets.create_or_update(
+            name="str",
+            version="str",
+            dataset_version={
+                "dataUri": "str",
+                "name": "str",
+                "type": "uri_file",
+                "version": "str",
+                "connectionName": "str",
+                "description": "str",
+                "id": "str",
+                "isReference": bool,
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_datasets_pending_upload(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.datasets.pending_upload(
+            name="str",
+            version="str",
+            pending_upload_request={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"},
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_datasets_get_credentials(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.datasets.get_credentials(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py
new file mode 100644
index 000000000000..b14d85963fe0
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py
@@ -0,0 +1,106 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import AIProjectPreparer
+from testpreparer_async import AIProjectClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectDatasetsOperationsAsync(AIProjectClientTestBaseAsync):
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_list_versions(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.datasets.list_versions(
+            name="str",
+        )
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_list(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.datasets.list()
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_get(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.datasets.get(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_delete(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.datasets.delete(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_create_or_update(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.datasets.create_or_update(
+            name="str",
+            version="str",
+            dataset_version={
+                "dataUri": "str",
+                "name": "str",
+                "type": "uri_file",
+                "version": "str",
+                "connectionName": "str",
+                "description": "str",
+                "id": "str",
+                "isReference": bool,
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_pending_upload(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.datasets.pending_upload(
+            name="str",
+            version="str",
+            pending_upload_request={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"},
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_datasets_get_credentials(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.datasets.get_credentials(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations.py
new file mode 100644
index 000000000000..b0e1e586d866
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations.py
@@ -0,0 +1,33 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils import recorded_by_proxy
+from testpreparer import AIProjectClientTestBase, AIProjectPreparer
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectDeploymentsOperations(AIProjectClientTestBase):
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_deployments_get(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.deployments.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_deployments_list(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.deployments.list()
+        result = [r for r in response]
+        # please add your own check logic here (see the sketch after this file)
+        # ...
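[Editor's note on the recurring placeholder: the generated tests only materialize the service response, leaving the real assertions to the author. Below is a minimal, standalone sketch of what that check logic could look like for the deployments list operation above. It is illustrative only and not part of this patch; the AIPROJECT_ENDPOINT environment variable and the assumption that each deployment model exposes a non-empty `name` (mirroring the `name="str"` parameter in the generated get() test) are hypothetical.

import os

from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient

# Hypothetical check logic for the deployments list test (sketch, not part of this diff).
client = AIProjectClient(
    endpoint=os.environ["AIPROJECT_ENDPOINT"],  # assumed environment variable
    credential=DefaultAzureCredential(),
)
deployments = list(client.deployments.list())
for deployment in deployments:
    # Assumes the deployment model exposes a non-empty `name` attribute.
    assert deployment.name

The same pattern (materialize the pager, then assert on contract-level fields) applies to the other generated list tests in this patch.]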
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations_async.py
new file mode 100644
index 000000000000..3958d83eab29
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_deployments_operations_async.py
@@ -0,0 +1,34 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import AIProjectPreparer
+from testpreparer_async import AIProjectClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectDeploymentsOperationsAsync(AIProjectClientTestBaseAsync):
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_deployments_get(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.deployments.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_deployments_list(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.deployments.list()
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations.py
new file mode 100644
index 000000000000..e07aa0e02b47
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils import recorded_by_proxy
+from testpreparer import AIProjectClientTestBase, AIProjectPreparer
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectEvaluationsOperations(AIProjectClientTestBase):
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_evaluations_get(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.evaluations.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_evaluations_list(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.evaluations.list()
+        result = [r for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_evaluations_create(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.evaluations.create(
+            evaluation={
+                "data": "input_data",
+                "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}},
+                "id": "str",
+                "description": "str",
+                "displayName": "str",
+                "properties": {"str": "str"},
+                "status": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_evaluations_create_agent_evaluation(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.evaluations.create_agent_evaluation(
+            evaluation={
+                "appInsightsConnectionString": "str",
+                "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}},
+                "runId": "str",
+                "redactionConfiguration": {"redactScoreProperties": bool},
+                "samplingConfiguration": {"maxRequestRate": 0.0, "name": "str", "samplingPercent": 0.0},
+                "threadId": "str",
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations_async.py
new file mode 100644
index 000000000000..07f22bd9e58a
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_evaluations_operations_async.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import AIProjectPreparer
+from testpreparer_async import AIProjectClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectEvaluationsOperationsAsync(AIProjectClientTestBaseAsync):
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_evaluations_get(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.evaluations.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_evaluations_list(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.evaluations.list()
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_evaluations_create(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.evaluations.create(
+            evaluation={
+                "data": "input_data",
+                "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}},
+                "id": "str",
+                "description": "str",
+                "displayName": "str",
+                "properties": {"str": "str"},
+                "status": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_evaluations_create_agent_evaluation(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.evaluations.create_agent_evaluation(
+            evaluation={
+                "appInsightsConnectionString": "str",
+                "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}},
+                "runId": "str",
+                "redactionConfiguration": {"redactScoreProperties": bool},
+                "samplingConfiguration": {"maxRequestRate": 0.0, "name": "str", "samplingPercent": 0.0},
+                "threadId": "str",
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py
new file mode 100644
index 000000000000..da540420af3f
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils import recorded_by_proxy
+from testpreparer import AIProjectClientTestBase, AIProjectPreparer
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectIndexesOperations(AIProjectClientTestBase):
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_indexes_list_versions(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.indexes.list_versions(
+            name="str",
+        )
+        result = [r for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_indexes_list(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.indexes.list()
+        result = [r for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_indexes_get(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.indexes.get(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_indexes_delete(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.indexes.delete(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_indexes_create_or_update(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.indexes.create_or_update(
+            name="str",
+            version="str",
+            index={
+                "connectionName": "str",
+                "indexName": "str",
+                "name": "str",
+                "type": "AzureSearch",
+                "version": "str",
+                "description": "str",
+                "fieldMapping": {
+                    "contentFields": ["str"],
+                    "filepathField": "str",
+                    "metadataFields": ["str"],
+                    "titleField": "str",
+                    "urlField": "str",
+                    "vectorFields": ["str"],
+                },
+                "id": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py
new file mode 100644
index 000000000000..a06d746d144a
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py
@@ -0,0 +1,88 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import AIProjectPreparer
+from testpreparer_async import AIProjectClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectIndexesOperationsAsync(AIProjectClientTestBaseAsync):
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_indexes_list_versions(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.indexes.list_versions(
+            name="str",
+        )
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_indexes_list(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.indexes.list()
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_indexes_get(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.indexes.get(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_indexes_delete(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.indexes.delete(
+            name="str",
+            version="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_indexes_create_or_update(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.indexes.create_or_update(
+            name="str",
+            version="str",
+            index={
+                "connectionName": "str",
+                "indexName": "str",
+                "name": "str",
+                "type": "AzureSearch",
+                "version": "str",
+                "description": "str",
+                "fieldMapping": {
+                    "contentFields": ["str"],
+                    "filepathField": "str",
+                    "metadataFields": ["str"],
+                    "titleField": "str",
+                    "urlField": "str",
+                    "vectorFields": ["str"],
+                },
+                "id": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations.py
new file mode 100644
index 000000000000..8cb4893cbb4c
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils import recorded_by_proxy
+from testpreparer import AIProjectClientTestBase, AIProjectPreparer
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectRedTeamsOperations(AIProjectClientTestBase):
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_red_teams_get(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.red_teams.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_red_teams_list(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.red_teams.list()
+        result = [r for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy
+    def test_red_teams_create(self, aiproject_endpoint):
+        client = self.create_client(endpoint=aiproject_endpoint)
+        response = client.red_teams.create(
+            red_team={
+                "id": "str",
+                "target": "target_config",
+                "applicationScenario": "str",
+                "attackStrategies": ["str"],
+                "displayName": "str",
+                "numTurns": 0,
+                "properties": {"str": "str"},
+                "riskCategories": ["str"],
+                "simulationOnly": bool,
+                "status": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations_async.py
new file mode 100644
index 000000000000..dc93a4d14181
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_red_teams_operations_async.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import AIProjectPreparer
+from testpreparer_async import AIProjectClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAIProjectRedTeamsOperationsAsync(AIProjectClientTestBaseAsync):
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_red_teams_get(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.red_teams.get(
+            name="str",
+        )
+
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_red_teams_list(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = client.red_teams.list()
+        result = [r async for r in response]
+        # please add your own check logic here
+        # ...
+
+    @AIProjectPreparer()
+    @recorded_by_proxy_async
+    async def test_red_teams_create(self, aiproject_endpoint):
+        client = self.create_async_client(endpoint=aiproject_endpoint)
+        response = await client.red_teams.create(
+            red_team={
+                "id": "str",
+                "target": "target_config",
+                "applicationScenario": "str",
+                "attackStrategies": ["str"],
+                "displayName": "str",
+                "numTurns": 0,
+                "properties": {"str": "str"},
+                "riskCategories": ["str"],
+                "simulationOnly": bool,
+                "status": "str",
+                "tags": {"str": "str"},
+            },
+        )
+
+        # please add your own check logic here
+        # ...
diff --git a/sdk/ai/azure-ai-projects/generated_tests/testpreparer.py b/sdk/ai/azure-ai-projects/generated_tests/testpreparer.py
new file mode 100644
index 000000000000..69c9aaa6e8d1
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/testpreparer.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from azure.ai.projects import AIProjectClient
+from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer
+import functools
+
+
+class AIProjectClientTestBase(AzureRecordedTestCase):
+
+    def create_client(self, endpoint):
+        credential = self.get_credential(AIProjectClient)
+        return self.create_client_from_credential(
+            AIProjectClient,
+            credential=credential,
+            endpoint=endpoint,
+        )
+
+
+AIProjectPreparer = functools.partial(
+    PowerShellPreparer, "aiproject", aiproject_endpoint="https://fake_aiproject_endpoint.com"
+)
diff --git a/sdk/ai/azure-ai-projects/generated_tests/testpreparer_async.py b/sdk/ai/azure-ai-projects/generated_tests/testpreparer_async.py
new file mode 100644
index 000000000000..56353f9fdd65
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/generated_tests/testpreparer_async.py
@@ -0,0 +1,20 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.ai.projects.aio import AIProjectClient +from devtools_testutils import AzureRecordedTestCase + + +class AIProjectClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(AIProjectClient, is_async=True) + return self.create_client_from_credential( + AIProjectClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/batch/azure-batch/MANIFEST.in b/sdk/batch/azure-batch/MANIFEST.in index cb1e2b1128cb..610802698578 100644 --- a/sdk/batch/azure-batch/MANIFEST.in +++ b/sdk/batch/azure-batch/MANIFEST.in @@ -1,6 +1,5 @@ include *.md include LICENSE -include azure/batch/py.typed +include client/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/__init__.py \ No newline at end of file diff --git a/sdk/batch/azure-batch/_metadata.json b/sdk/batch/azure-batch/_metadata.json new file mode 100644 index 000000000000..f052a1580575 --- /dev/null +++ b/sdk/batch/azure-batch/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "2024-07-01.20.0" +} \ No newline at end of file diff --git a/sdk/batch/azure-batch/apiview-properties.json b/sdk/batch/azure-batch/apiview-properties.json new file mode 100644 index 000000000000..1c3d8561ea7e --- /dev/null +++ b/sdk/batch/azure-batch/apiview-properties.json @@ -0,0 +1,363 @@ +{ + "CrossLanguagePackageId": "Azure.Batch", + "CrossLanguageDefinitionId": { + "azure.batch.models.AffinityInfo": "Azure.Batch.AffinityInfo", + "azure.batch.models.AuthenticationTokenSettings": "Azure.Batch.AuthenticationTokenSettings", + "azure.batch.models.AutomaticOsUpgradePolicy": "Azure.Batch.AutomaticOsUpgradePolicy", + "azure.batch.models.AutoScaleRun": "Azure.Batch.AutoScaleRun", + "azure.batch.models.AutoScaleRunError": "Azure.Batch.AutoScaleRunError", + "azure.batch.models.AutoUserSpecification": "Azure.Batch.AutoUserSpecification", + "azure.batch.models.AzureBlobFileSystemConfiguration": "Azure.Batch.AzureBlobFileSystemConfiguration", + "azure.batch.models.AzureFileShareConfiguration": "Azure.Batch.AzureFileShareConfiguration", + "azure.batch.models.BatchApplication": "Azure.Batch.BatchApplication", + "azure.batch.models.BatchApplicationPackageReference": "Azure.Batch.BatchApplicationPackageReference", + "azure.batch.models.BatchAutoPoolSpecification": "Azure.Batch.BatchAutoPoolSpecification", + "azure.batch.models.BatchCertificate": "Azure.Batch.BatchCertificate", + "azure.batch.models.BatchCertificateReference": "Azure.Batch.BatchCertificateReference", + "azure.batch.models.BatchError": "Azure.Batch.BatchError", + "azure.batch.models.BatchErrorDetail": "Azure.Batch.BatchErrorDetail", + "azure.batch.models.BatchErrorMessage": "Azure.Batch.BatchErrorMessage", + "azure.batch.models.BatchJob": "Azure.Batch.BatchJob", + "azure.batch.models.BatchJobConstraints": "Azure.Batch.BatchJobConstraints", + "azure.batch.models.BatchJobCreateContent": "Azure.Batch.BatchJobCreateContent", + "azure.batch.models.BatchJobDisableContent": "Azure.Batch.BatchJobDisableContent", + "azure.batch.models.BatchJobExecutionInfo": "Azure.Batch.BatchJobExecutionInfo", + "azure.batch.models.BatchJobManagerTask": "Azure.Batch.BatchJobManagerTask", + "azure.batch.models.BatchJobNetworkConfiguration": 
"Azure.Batch.BatchJobNetworkConfiguration", + "azure.batch.models.BatchJobPreparationAndReleaseTaskStatus": "Azure.Batch.BatchJobPreparationAndReleaseTaskStatus", + "azure.batch.models.BatchJobPreparationTask": "Azure.Batch.BatchJobPreparationTask", + "azure.batch.models.BatchJobPreparationTaskExecutionInfo": "Azure.Batch.BatchJobPreparationTaskExecutionInfo", + "azure.batch.models.BatchJobReleaseTask": "Azure.Batch.BatchJobReleaseTask", + "azure.batch.models.BatchJobReleaseTaskExecutionInfo": "Azure.Batch.BatchJobReleaseTaskExecutionInfo", + "azure.batch.models.BatchJobSchedule": "Azure.Batch.BatchJobSchedule", + "azure.batch.models.BatchJobScheduleConfiguration": "Azure.Batch.BatchJobScheduleConfiguration", + "azure.batch.models.BatchJobScheduleCreateContent": "Azure.Batch.BatchJobScheduleCreateContent", + "azure.batch.models.BatchJobScheduleExecutionInfo": "Azure.Batch.BatchJobScheduleExecutionInfo", + "azure.batch.models.BatchJobScheduleStatistics": "Azure.Batch.BatchJobScheduleStatistics", + "azure.batch.models.BatchJobScheduleUpdateContent": "Azure.Batch.BatchJobScheduleUpdateContent", + "azure.batch.models.BatchJobSchedulingError": "Azure.Batch.BatchJobSchedulingError", + "azure.batch.models.BatchJobSpecification": "Azure.Batch.BatchJobSpecification", + "azure.batch.models.BatchJobStatistics": "Azure.Batch.BatchJobStatistics", + "azure.batch.models.BatchJobTerminateContent": "Azure.Batch.BatchJobTerminateContent", + "azure.batch.models.BatchJobUpdateContent": "Azure.Batch.BatchJobUpdateContent", + "azure.batch.models.BatchNode": "Azure.Batch.BatchNode", + "azure.batch.models.BatchNodeAgentInfo": "Azure.Batch.BatchNodeAgentInfo", + "azure.batch.models.BatchNodeCounts": "Azure.Batch.BatchNodeCounts", + "azure.batch.models.BatchNodeDeallocateContent": "Azure.Batch.BatchNodeDeallocateContent", + "azure.batch.models.BatchNodeDisableSchedulingContent": "Azure.Batch.BatchNodeDisableSchedulingContent", + "azure.batch.models.BatchNodeEndpointConfiguration": "Azure.Batch.BatchNodeEndpointConfiguration", + "azure.batch.models.BatchNodeError": "Azure.Batch.BatchNodeError", + "azure.batch.models.BatchNodeFile": "Azure.Batch.BatchNodeFile", + "azure.batch.models.BatchNodeIdentityReference": "Azure.Batch.BatchNodeIdentityReference", + "azure.batch.models.BatchNodeInfo": "Azure.Batch.BatchNodeInfo", + "azure.batch.models.BatchNodePlacementConfiguration": "Azure.Batch.BatchNodePlacementConfiguration", + "azure.batch.models.BatchNodeRebootContent": "Azure.Batch.BatchNodeRebootContent", + "azure.batch.models.BatchNodeReimageContent": "Azure.Batch.BatchNodeReimageContent", + "azure.batch.models.BatchNodeRemoteLoginSettings": "Azure.Batch.BatchNodeRemoteLoginSettings", + "azure.batch.models.BatchNodeRemoveContent": "Azure.Batch.BatchNodeRemoveContent", + "azure.batch.models.BatchNodeUserCreateContent": "Azure.Batch.BatchNodeUserCreateContent", + "azure.batch.models.BatchNodeUserUpdateContent": "Azure.Batch.BatchNodeUserUpdateContent", + "azure.batch.models.BatchNodeVMExtension": "Azure.Batch.BatchNodeVMExtension", + "azure.batch.models.BatchPool": "Azure.Batch.BatchPool", + "azure.batch.models.BatchPoolCreateContent": "Azure.Batch.BatchPoolCreateContent", + "azure.batch.models.BatchPoolEnableAutoScaleContent": "Azure.Batch.BatchPoolEnableAutoScaleContent", + "azure.batch.models.BatchPoolEndpointConfiguration": "Azure.Batch.BatchPoolEndpointConfiguration", + "azure.batch.models.BatchPoolEvaluateAutoScaleContent": "Azure.Batch.BatchPoolEvaluateAutoScaleContent", + "azure.batch.models.BatchPoolIdentity": 
"Azure.Batch.BatchPoolIdentity", + "azure.batch.models.BatchPoolInfo": "Azure.Batch.BatchPoolInfo", + "azure.batch.models.BatchPoolNodeCounts": "Azure.Batch.BatchPoolNodeCounts", + "azure.batch.models.BatchPoolReplaceContent": "Azure.Batch.BatchPoolReplaceContent", + "azure.batch.models.BatchPoolResizeContent": "Azure.Batch.BatchPoolResizeContent", + "azure.batch.models.BatchPoolResourceStatistics": "Azure.Batch.BatchPoolResourceStatistics", + "azure.batch.models.BatchPoolSpecification": "Azure.Batch.BatchPoolSpecification", + "azure.batch.models.BatchPoolStatistics": "Azure.Batch.BatchPoolStatistics", + "azure.batch.models.BatchPoolUpdateContent": "Azure.Batch.BatchPoolUpdateContent", + "azure.batch.models.BatchPoolUsageMetrics": "Azure.Batch.BatchPoolUsageMetrics", + "azure.batch.models.BatchPoolUsageStatistics": "Azure.Batch.BatchPoolUsageStatistics", + "azure.batch.models.BatchStartTask": "Azure.Batch.BatchStartTask", + "azure.batch.models.BatchStartTaskInfo": "Azure.Batch.BatchStartTaskInfo", + "azure.batch.models.BatchSubtask": "Azure.Batch.BatchSubtask", + "azure.batch.models.BatchSupportedImage": "Azure.Batch.BatchSupportedImage", + "azure.batch.models.BatchTask": "Azure.Batch.BatchTask", + "azure.batch.models.BatchTaskAddCollectionResult": "Azure.Batch.BatchTaskAddCollectionResult", + "azure.batch.models.BatchTaskAddResult": "Azure.Batch.BatchTaskAddResult", + "azure.batch.models.BatchTaskConstraints": "Azure.Batch.BatchTaskConstraints", + "azure.batch.models.BatchTaskContainerExecutionInfo": "Azure.Batch.BatchTaskContainerExecutionInfo", + "azure.batch.models.BatchTaskContainerSettings": "Azure.Batch.BatchTaskContainerSettings", + "azure.batch.models.BatchTaskCounts": "Azure.Batch.BatchTaskCounts", + "azure.batch.models.BatchTaskCountsResult": "Azure.Batch.BatchTaskCountsResult", + "azure.batch.models.BatchTaskCreateContent": "Azure.Batch.BatchTaskCreateContent", + "azure.batch.models.BatchTaskDependencies": "Azure.Batch.BatchTaskDependencies", + "azure.batch.models.BatchTaskExecutionInfo": "Azure.Batch.BatchTaskExecutionInfo", + "azure.batch.models.BatchTaskFailureInfo": "Azure.Batch.BatchTaskFailureInfo", + "azure.batch.models.BatchTaskGroup": "Azure.Batch.BatchTaskGroup", + "azure.batch.models.BatchTaskIdRange": "Azure.Batch.BatchTaskIdRange", + "azure.batch.models.BatchTaskInfo": "Azure.Batch.BatchTaskInfo", + "azure.batch.models.BatchTaskSchedulingPolicy": "Azure.Batch.BatchTaskSchedulingPolicy", + "azure.batch.models.BatchTaskSlotCounts": "Azure.Batch.BatchTaskSlotCounts", + "azure.batch.models.BatchTaskStatistics": "Azure.Batch.BatchTaskStatistics", + "azure.batch.models.CifsMountConfiguration": "Azure.Batch.CifsMountConfiguration", + "azure.batch.models.ContainerConfiguration": "Azure.Batch.ContainerConfiguration", + "azure.batch.models.ContainerHostBatchBindMountEntry": "Azure.Batch.ContainerHostBatchBindMountEntry", + "azure.batch.models.ContainerRegistryReference": "Azure.Batch.ContainerRegistryReference", + "azure.batch.models.DataDisk": "Azure.Batch.DataDisk", + "azure.batch.models.DeleteBatchCertificateError": "Azure.Batch.DeleteBatchCertificateError", + "azure.batch.models.DiffDiskSettings": "Azure.Batch.DiffDiskSettings", + "azure.batch.models.DiskEncryptionConfiguration": "Azure.Batch.DiskEncryptionConfiguration", + "azure.batch.models.EnvironmentSetting": "Azure.Batch.EnvironmentSetting", + "azure.batch.models.ExitCodeMapping": "Azure.Batch.ExitCodeMapping", + "azure.batch.models.ExitCodeRangeMapping": "Azure.Batch.ExitCodeRangeMapping", + 
"azure.batch.models.ExitConditions": "Azure.Batch.ExitConditions", + "azure.batch.models.ExitOptions": "Azure.Batch.ExitOptions", + "azure.batch.models.FileProperties": "Azure.Batch.FileProperties", + "azure.batch.models.HttpHeader": "Azure.Batch.HttpHeader", + "azure.batch.models.ImageReference": "Azure.Batch.ImageReference", + "azure.batch.models.InboundEndpoint": "Azure.Batch.InboundEndpoint", + "azure.batch.models.InboundNatPool": "Azure.Batch.InboundNatPool", + "azure.batch.models.InstanceViewStatus": "Azure.Batch.InstanceViewStatus", + "azure.batch.models.LinuxUserConfiguration": "Azure.Batch.LinuxUserConfiguration", + "azure.batch.models.ManagedDisk": "Azure.Batch.ManagedDisk", + "azure.batch.models.MetadataItem": "Azure.Batch.MetadataItem", + "azure.batch.models.MountConfiguration": "Azure.Batch.MountConfiguration", + "azure.batch.models.MultiInstanceSettings": "Azure.Batch.MultiInstanceSettings", + "azure.batch.models.NameValuePair": "Azure.Batch.NameValuePair", + "azure.batch.models.NetworkConfiguration": "Azure.Batch.NetworkConfiguration", + "azure.batch.models.NetworkSecurityGroupRule": "Azure.Batch.NetworkSecurityGroupRule", + "azure.batch.models.NfsMountConfiguration": "Azure.Batch.NfsMountConfiguration", + "azure.batch.models.OSDisk": "Azure.Batch.OSDisk", + "azure.batch.models.OutputFile": "Azure.Batch.OutputFile", + "azure.batch.models.OutputFileBlobContainerDestination": "Azure.Batch.OutputFileBlobContainerDestination", + "azure.batch.models.OutputFileDestination": "Azure.Batch.OutputFileDestination", + "azure.batch.models.OutputFileUploadConfig": "Azure.Batch.OutputFileUploadConfig", + "azure.batch.models.PublicIpAddressConfiguration": "Azure.Batch.PublicIpAddressConfiguration", + "azure.batch.models.RecentBatchJob": "Azure.Batch.RecentBatchJob", + "azure.batch.models.ResizeError": "Azure.Batch.ResizeError", + "azure.batch.models.ResourceFile": "Azure.Batch.ResourceFile", + "azure.batch.models.RollingUpgradePolicy": "Azure.Batch.RollingUpgradePolicy", + "azure.batch.models.SecurityProfile": "Azure.Batch.SecurityProfile", + "azure.batch.models.ServiceArtifactReference": "Azure.Batch.ServiceArtifactReference", + "azure.batch.models.UefiSettings": "Azure.Batch.UefiSettings", + "azure.batch.models.UpgradePolicy": "Azure.Batch.UpgradePolicy", + "azure.batch.models.UploadBatchServiceLogsContent": "Azure.Batch.UploadBatchServiceLogsContent", + "azure.batch.models.UploadBatchServiceLogsResult": "Azure.Batch.UploadBatchServiceLogsResult", + "azure.batch.models.UserAccount": "Azure.Batch.UserAccount", + "azure.batch.models.UserAssignedIdentity": "Azure.Batch.UserAssignedIdentity", + "azure.batch.models.UserIdentity": "Azure.Batch.UserIdentity", + "azure.batch.models.VirtualMachineConfiguration": "Azure.Batch.VirtualMachineConfiguration", + "azure.batch.models.VirtualMachineInfo": "Azure.Batch.VirtualMachineInfo", + "azure.batch.models.VMDiskSecurityProfile": "Azure.Batch.VMDiskSecurityProfile", + "azure.batch.models.VMExtension": "Azure.Batch.VMExtension", + "azure.batch.models.VMExtensionInstanceView": "Azure.Batch.VMExtensionInstanceView", + "azure.batch.models.WindowsConfiguration": "Azure.Batch.WindowsConfiguration", + "azure.batch.models.WindowsUserConfiguration": "Azure.Batch.WindowsUserConfiguration", + "client.models.CachingType": "Azure.Batch.CachingType", + "client.models.StorageAccountType": "Azure.Batch.StorageAccountType", + "client.models.ContainerType": "Azure.Batch.ContainerType", + "client.models.DiskEncryptionTarget": "Azure.Batch.DiskEncryptionTarget", + 
"client.models.BatchNodePlacementPolicyType": "Azure.Batch.BatchNodePlacementPolicyType", + "client.models.DiffDiskPlacement": "Azure.Batch.DiffDiskPlacement", + "client.models.SecurityEncryptionTypes": "Azure.Batch.SecurityEncryptionTypes", + "client.models.SecurityTypes": "Azure.Batch.SecurityTypes", + "client.models.DynamicVNetAssignmentScope": "Azure.Batch.DynamicVNetAssignmentScope", + "client.models.InboundEndpointProtocol": "Azure.Batch.InboundEndpointProtocol", + "client.models.NetworkSecurityGroupRuleAccess": "Azure.Batch.NetworkSecurityGroupRuleAccess", + "client.models.IpAddressProvisioningType": "Azure.Batch.IpAddressProvisioningType", + "client.models.ContainerWorkingDirectory": "Azure.Batch.ContainerWorkingDirectory", + "client.models.ContainerHostDataPath": "Azure.Batch.ContainerHostDataPath", + "client.models.AutoUserScope": "Azure.Batch.AutoUserScope", + "client.models.ElevationLevel": "Azure.Batch.ElevationLevel", + "client.models.BatchCertificateStoreLocation": "Azure.Batch.BatchCertificateStoreLocation", + "client.models.BatchCertificateVisibility": "Azure.Batch.BatchCertificateVisibility", + "client.models.BatchNodeFillType": "Azure.Batch.BatchNodeFillType", + "client.models.LoginMode": "Azure.Batch.LoginMode", + "client.models.BatchNodeCommunicationMode": "Azure.Batch.BatchNodeCommunicationMode", + "client.models.UpgradeMode": "Azure.Batch.UpgradeMode", + "client.models.BatchPoolState": "Azure.Batch.BatchPoolState", + "client.models.AllocationState": "Azure.Batch.AllocationState", + "client.models.BatchPoolIdentityType": "Azure.Batch.BatchPoolIdentityType", + "client.models.BatchNodeDeallocationOption": "Azure.Batch.BatchNodeDeallocationOption", + "client.models.OSType": "Azure.Batch.OSType", + "client.models.ImageVerificationType": "Azure.Batch.ImageVerificationType", + "client.models.BatchJobState": "Azure.Batch.BatchJobState", + "client.models.OutputFileUploadCondition": "Azure.Batch.OutputFileUploadCondition", + "client.models.AccessScope": "Azure.Batch.AccessScope", + "client.models.BatchPoolLifetimeOption": "Azure.Batch.BatchPoolLifetimeOption", + "client.models.OnAllBatchTasksComplete": "Azure.Batch.OnAllBatchTasksComplete", + "client.models.OnBatchTaskFailure": "Azure.Batch.OnBatchTaskFailure", + "client.models.ErrorCategory": "Azure.Batch.ErrorCategory", + "client.models.DisableBatchJobOption": "Azure.Batch.DisableBatchJobOption", + "client.models.BatchJobPreparationTaskState": "Azure.Batch.BatchJobPreparationTaskState", + "client.models.BatchTaskExecutionResult": "Azure.Batch.BatchTaskExecutionResult", + "client.models.BatchJobReleaseTaskState": "Azure.Batch.BatchJobReleaseTaskState", + "client.models.BatchCertificateState": "Azure.Batch.BatchCertificateState", + "client.models.BatchCertificateFormat": "Azure.Batch.BatchCertificateFormat", + "client.models.BatchJobScheduleState": "Azure.Batch.BatchJobScheduleState", + "client.models.BatchJobAction": "Azure.Batch.BatchJobAction", + "client.models.DependencyAction": "Azure.Batch.DependencyAction", + "client.models.BatchTaskState": "Azure.Batch.BatchTaskState", + "client.models.BatchTaskAddStatus": "Azure.Batch.BatchTaskAddStatus", + "client.models.BatchSubtaskState": "Azure.Batch.BatchSubtaskState", + "client.models.BatchNodeState": "Azure.Batch.BatchNodeState", + "client.models.SchedulingState": "Azure.Batch.SchedulingState", + "client.models.BatchStartTaskState": "Azure.Batch.BatchStartTaskState", + "client.models.BatchNodeRebootOption": "Azure.Batch.BatchNodeRebootOption", + 
"client.models.BatchNodeReimageOption": "Azure.Batch.BatchNodeReimageOption", + "client.models.BatchNodeDeallocateOption": "Azure.Batch.BatchNodeDeallocateOption", + "client.models.BatchNodeDisableSchedulingOption": "Azure.Batch.BatchNodeDisableSchedulingOption", + "client.models.StatusLevelTypes": "Azure.Batch.StatusLevelTypes", + "client.BatchClient.list_applications": "Client.BatchClient.listApplications", + "client.aio.BatchClient.list_applications": "Client.BatchClient.listApplications", + "client.BatchClient.get_application": "Client.BatchClient.getApplication", + "client.aio.BatchClient.get_application": "Client.BatchClient.getApplication", + "client.BatchClient.list_pool_usage_metrics": "Client.BatchClient.listPoolUsageMetrics", + "client.aio.BatchClient.list_pool_usage_metrics": "Client.BatchClient.listPoolUsageMetrics", + "client.BatchClient.create_pool": "Client.BatchClient.createPool", + "client.aio.BatchClient.create_pool": "Client.BatchClient.createPool", + "client.BatchClient.list_pools": "Client.BatchClient.listPools", + "client.aio.BatchClient.list_pools": "Client.BatchClient.listPools", + "client.BatchClient.delete_pool": "Client.BatchClient.deletePool", + "client.aio.BatchClient.delete_pool": "Client.BatchClient.deletePool", + "client.BatchClient.pool_exists": "Client.BatchClient.poolExists", + "client.aio.BatchClient.pool_exists": "Client.BatchClient.poolExists", + "client.BatchClient.get_pool": "Client.BatchClient.getPool", + "client.aio.BatchClient.get_pool": "Client.BatchClient.getPool", + "client.BatchClient.update_pool": "Client.BatchClient.updatePool", + "client.aio.BatchClient.update_pool": "Client.BatchClient.updatePool", + "client.BatchClient.disable_pool_auto_scale": "Client.BatchClient.disablePoolAutoScale", + "client.aio.BatchClient.disable_pool_auto_scale": "Client.BatchClient.disablePoolAutoScale", + "client.BatchClient.enable_pool_auto_scale": "Client.BatchClient.enablePoolAutoScale", + "client.aio.BatchClient.enable_pool_auto_scale": "Client.BatchClient.enablePoolAutoScale", + "client.BatchClient.evaluate_pool_auto_scale": "Client.BatchClient.evaluatePoolAutoScale", + "client.aio.BatchClient.evaluate_pool_auto_scale": "Client.BatchClient.evaluatePoolAutoScale", + "client.BatchClient.resize_pool": "Client.BatchClient.resizePool", + "client.aio.BatchClient.resize_pool": "Client.BatchClient.resizePool", + "client.BatchClient.stop_pool_resize": "Client.BatchClient.stopPoolResize", + "client.aio.BatchClient.stop_pool_resize": "Client.BatchClient.stopPoolResize", + "client.BatchClient.replace_pool_properties": "Client.BatchClient.replacePoolProperties", + "client.aio.BatchClient.replace_pool_properties": "Client.BatchClient.replacePoolProperties", + "client.BatchClient.remove_nodes": "Client.BatchClient.removeNodes", + "client.aio.BatchClient.remove_nodes": "Client.BatchClient.removeNodes", + "client.BatchClient.list_supported_images": "Client.BatchClient.listSupportedImages", + "client.aio.BatchClient.list_supported_images": "Client.BatchClient.listSupportedImages", + "client.BatchClient.list_pool_node_counts": "Client.BatchClient.listPoolNodeCounts", + "client.aio.BatchClient.list_pool_node_counts": "Client.BatchClient.listPoolNodeCounts", + "client.BatchClient.delete_job": "Client.BatchClient.deleteJob", + "client.aio.BatchClient.delete_job": "Client.BatchClient.deleteJob", + "client.BatchClient.get_job": "Client.BatchClient.getJob", + "client.aio.BatchClient.get_job": "Client.BatchClient.getJob", + "client.BatchClient.update_job": 
"Client.BatchClient.updateJob", + "client.aio.BatchClient.update_job": "Client.BatchClient.updateJob", + "client.BatchClient.replace_job": "Client.BatchClient.replaceJob", + "client.aio.BatchClient.replace_job": "Client.BatchClient.replaceJob", + "client.BatchClient.disable_job": "Client.BatchClient.disableJob", + "client.aio.BatchClient.disable_job": "Client.BatchClient.disableJob", + "client.BatchClient.enable_job": "Client.BatchClient.enableJob", + "client.aio.BatchClient.enable_job": "Client.BatchClient.enableJob", + "client.BatchClient.terminate_job": "Client.BatchClient.terminateJob", + "client.aio.BatchClient.terminate_job": "Client.BatchClient.terminateJob", + "client.BatchClient.create_job": "Client.BatchClient.createJob", + "client.aio.BatchClient.create_job": "Client.BatchClient.createJob", + "client.BatchClient.list_jobs": "Client.BatchClient.listJobs", + "client.aio.BatchClient.list_jobs": "Client.BatchClient.listJobs", + "client.BatchClient.list_jobs_from_schedule": "Client.BatchClient.listJobsFromSchedule", + "client.aio.BatchClient.list_jobs_from_schedule": "Client.BatchClient.listJobsFromSchedule", + "client.BatchClient.list_job_preparation_and_release_task_status": "Client.BatchClient.listJobPreparationAndReleaseTaskStatus", + "client.aio.BatchClient.list_job_preparation_and_release_task_status": "Client.BatchClient.listJobPreparationAndReleaseTaskStatus", + "client.BatchClient.get_job_task_counts": "Client.BatchClient.getJobTaskCounts", + "client.aio.BatchClient.get_job_task_counts": "Client.BatchClient.getJobTaskCounts", + "client.BatchClient.create_certificate": "Client.BatchClient.createCertificate", + "client.aio.BatchClient.create_certificate": "Client.BatchClient.createCertificate", + "client.BatchClient.list_certificates": "Client.BatchClient.listCertificates", + "client.aio.BatchClient.list_certificates": "Client.BatchClient.listCertificates", + "client.BatchClient.cancel_certificate_deletion": "Client.BatchClient.cancelCertificateDeletion", + "client.aio.BatchClient.cancel_certificate_deletion": "Client.BatchClient.cancelCertificateDeletion", + "client.BatchClient.delete_certificate": "Client.BatchClient.deleteCertificate", + "client.aio.BatchClient.delete_certificate": "Client.BatchClient.deleteCertificate", + "client.BatchClient.get_certificate": "Client.BatchClient.getCertificate", + "client.aio.BatchClient.get_certificate": "Client.BatchClient.getCertificate", + "client.BatchClient.job_schedule_exists": "Client.BatchClient.jobScheduleExists", + "client.aio.BatchClient.job_schedule_exists": "Client.BatchClient.jobScheduleExists", + "client.BatchClient.delete_job_schedule": "Client.BatchClient.deleteJobSchedule", + "client.aio.BatchClient.delete_job_schedule": "Client.BatchClient.deleteJobSchedule", + "client.BatchClient.get_job_schedule": "Client.BatchClient.getJobSchedule", + "client.aio.BatchClient.get_job_schedule": "Client.BatchClient.getJobSchedule", + "client.BatchClient.update_job_schedule": "Client.BatchClient.updateJobSchedule", + "client.aio.BatchClient.update_job_schedule": "Client.BatchClient.updateJobSchedule", + "client.BatchClient.replace_job_schedule": "Client.BatchClient.replaceJobSchedule", + "client.aio.BatchClient.replace_job_schedule": "Client.BatchClient.replaceJobSchedule", + "client.BatchClient.disable_job_schedule": "Client.BatchClient.disableJobSchedule", + "client.aio.BatchClient.disable_job_schedule": "Client.BatchClient.disableJobSchedule", + "client.BatchClient.enable_job_schedule": "Client.BatchClient.enableJobSchedule", + 
"client.aio.BatchClient.enable_job_schedule": "Client.BatchClient.enableJobSchedule", + "client.BatchClient.terminate_job_schedule": "Client.BatchClient.terminateJobSchedule", + "client.aio.BatchClient.terminate_job_schedule": "Client.BatchClient.terminateJobSchedule", + "client.BatchClient.create_job_schedule": "Client.BatchClient.createJobSchedule", + "client.aio.BatchClient.create_job_schedule": "Client.BatchClient.createJobSchedule", + "client.BatchClient.list_job_schedules": "Client.BatchClient.listJobSchedules", + "client.aio.BatchClient.list_job_schedules": "Client.BatchClient.listJobSchedules", + "client.BatchClient.create_task": "Client.BatchClient.createTask", + "client.aio.BatchClient.create_task": "Client.BatchClient.createTask", + "client.BatchClient.list_tasks": "Client.BatchClient.listTasks", + "client.aio.BatchClient.list_tasks": "Client.BatchClient.listTasks", + "client.BatchClient.create_task_collection": "Client.BatchClient.createTaskCollection", + "client.aio.BatchClient.create_task_collection": "Client.BatchClient.createTaskCollection", + "client.BatchClient.delete_task": "Client.BatchClient.deleteTask", + "client.aio.BatchClient.delete_task": "Client.BatchClient.deleteTask", + "client.BatchClient.get_task": "Client.BatchClient.getTask", + "client.aio.BatchClient.get_task": "Client.BatchClient.getTask", + "client.BatchClient.replace_task": "Client.BatchClient.replaceTask", + "client.aio.BatchClient.replace_task": "Client.BatchClient.replaceTask", + "client.BatchClient.list_sub_tasks": "Client.BatchClient.listSubTasks", + "client.aio.BatchClient.list_sub_tasks": "Client.BatchClient.listSubTasks", + "client.BatchClient.terminate_task": "Client.BatchClient.terminateTask", + "client.aio.BatchClient.terminate_task": "Client.BatchClient.terminateTask", + "client.BatchClient.reactivate_task": "Client.BatchClient.reactivateTask", + "client.aio.BatchClient.reactivate_task": "Client.BatchClient.reactivateTask", + "client.BatchClient.delete_task_file": "Client.BatchClient.deleteTaskFile", + "client.aio.BatchClient.delete_task_file": "Client.BatchClient.deleteTaskFile", + "client.BatchClient.get_task_file": "Client.BatchClient.getTaskFile", + "client.aio.BatchClient.get_task_file": "Client.BatchClient.getTaskFile", + "client.BatchClient.get_task_file_properties": "Client.BatchClient.getTaskFileProperties", + "client.aio.BatchClient.get_task_file_properties": "Client.BatchClient.getTaskFileProperties", + "client.BatchClient.list_task_files": "Client.BatchClient.listTaskFiles", + "client.aio.BatchClient.list_task_files": "Client.BatchClient.listTaskFiles", + "client.BatchClient.create_node_user": "Client.BatchClient.createNodeUser", + "client.aio.BatchClient.create_node_user": "Client.BatchClient.createNodeUser", + "client.BatchClient.delete_node_user": "Client.BatchClient.deleteNodeUser", + "client.aio.BatchClient.delete_node_user": "Client.BatchClient.deleteNodeUser", + "client.BatchClient.replace_node_user": "Client.BatchClient.replaceNodeUser", + "client.aio.BatchClient.replace_node_user": "Client.BatchClient.replaceNodeUser", + "client.BatchClient.get_node": "Client.BatchClient.getNode", + "client.aio.BatchClient.get_node": "Client.BatchClient.getNode", + "client.BatchClient.reboot_node": "Client.BatchClient.rebootNode", + "client.aio.BatchClient.reboot_node": "Client.BatchClient.rebootNode", + "client.BatchClient.start_node": "Client.BatchClient.startNode", + "client.aio.BatchClient.start_node": "Client.BatchClient.startNode", + "client.BatchClient.reimage_node": 
"Client.BatchClient.reimageNode", + "client.aio.BatchClient.reimage_node": "Client.BatchClient.reimageNode", + "client.BatchClient.deallocate_node": "Client.BatchClient.deallocateNode", + "client.aio.BatchClient.deallocate_node": "Client.BatchClient.deallocateNode", + "client.BatchClient.disable_node_scheduling": "Client.BatchClient.disableNodeScheduling", + "client.aio.BatchClient.disable_node_scheduling": "Client.BatchClient.disableNodeScheduling", + "client.BatchClient.enable_node_scheduling": "Client.BatchClient.enableNodeScheduling", + "client.aio.BatchClient.enable_node_scheduling": "Client.BatchClient.enableNodeScheduling", + "client.BatchClient.get_node_remote_login_settings": "Client.BatchClient.getNodeRemoteLoginSettings", + "client.aio.BatchClient.get_node_remote_login_settings": "Client.BatchClient.getNodeRemoteLoginSettings", + "client.BatchClient.upload_node_logs": "Client.BatchClient.uploadNodeLogs", + "client.aio.BatchClient.upload_node_logs": "Client.BatchClient.uploadNodeLogs", + "client.BatchClient.list_nodes": "Client.BatchClient.listNodes", + "client.aio.BatchClient.list_nodes": "Client.BatchClient.listNodes", + "client.BatchClient.get_node_extension": "Client.BatchClient.getNodeExtension", + "client.aio.BatchClient.get_node_extension": "Client.BatchClient.getNodeExtension", + "client.BatchClient.list_node_extensions": "Client.BatchClient.listNodeExtensions", + "client.aio.BatchClient.list_node_extensions": "Client.BatchClient.listNodeExtensions", + "client.BatchClient.delete_node_file": "Client.BatchClient.deleteNodeFile", + "client.aio.BatchClient.delete_node_file": "Client.BatchClient.deleteNodeFile", + "client.BatchClient.get_node_file": "Client.BatchClient.getNodeFile", + "client.aio.BatchClient.get_node_file": "Client.BatchClient.getNodeFile", + "client.BatchClient.get_node_file_properties": "Client.BatchClient.getNodeFileProperties", + "client.aio.BatchClient.get_node_file_properties": "Client.BatchClient.getNodeFileProperties", + "client.BatchClient.list_node_files": "Client.BatchClient.listNodeFiles", + "client.aio.BatchClient.list_node_files": "Client.BatchClient.listNodeFiles" + } +} \ No newline at end of file diff --git a/sdk/batch/azure-batch/azure/batch/__init__.py b/sdk/batch/azure-batch/azure/batch/__init__.py index a02f2bbd5c47..d55ccad1f573 100644 --- a/sdk/batch/azure-batch/azure/batch/__init__.py +++ b/sdk/batch/azure-batch/azure/batch/__init__.py @@ -1,32 +1 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
diff --git a/sdk/batch/azure-batch/azure/batch/__init__.py b/sdk/batch/azure-batch/azure/batch/__init__.py
index a02f2bbd5c47..d55ccad1f573 100644
--- a/sdk/batch/azure-batch/azure/batch/__init__.py
+++ b/sdk/batch/azure-batch/azure/batch/__init__.py
@@ -1,32 +1 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) Python Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-# pylint: disable=wrong-import-position
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from ._patch import *  # pylint: disable=unused-wildcard-import
-
-from ._client import BatchClient  # type: ignore
-from ._version import VERSION
-
-__version__ = VERSION
-
-try:
-    from ._patch import __all__ as _patch_all
-    from ._patch import *
-except ImportError:
-    _patch_all = []
-from ._patch import patch_sdk as _patch_sdk
-
-__all__ = [
-    "BatchClient",
-]
-__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
-
-_patch_sdk()
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
diff --git a/sdk/batch/azure-batch/azure/batch/_operations/_patch.py b/sdk/batch/azure-batch/azure/batch/_operations/_patch.py
index 4e0857b30791..d5af410f3a77 100644
--- a/sdk/batch/azure-batch/azure/batch/_operations/_patch.py
+++ b/sdk/batch/azure-batch/azure/batch/_operations/_patch.py
@@ -91,9 +91,7 @@ def create_tasks(
         # deque operations(append/pop) are thread-safe
         results_queue: Deque[_models.BatchTaskAddResult] = collections.deque()

-        task_workflow_manager = _TaskWorkflowManager(
-            self, job_id=job_id, task_collection=task_collection, **kwargs
-        )
+        task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs)

         # multi-threaded behavior
         if concurrencies:
@@ -241,18 +239,19 @@ def get_node_file_properties(
                 creation_time=headers["ocp-creation-time"],
                 # content_type=headers["Content-Type"],  # need to add to typespec
                 file_mode=headers["ocp-batch-file-mode"],
-            )
-
-        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal(  # type: ignore
-            pool_id,
-            node_id,
-            file_path,
-            timeout=timeout,
-            ocpdate=ocpdate,
-            if_modified_since=if_modified_since,
-            if_unmodified_since=if_unmodified_since,
+            )
+
+        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal(  # type: ignore
+            pool_id,
+            node_id,
+            file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
             cls=cls,
-            **kwargs)
+            **kwargs
+        )

         return get_response

@@ -309,18 +308,19 @@ def get_task_file_properties(
                 creation_time=headers["ocp-creation-time"],
                 # content_type=headers["Content-Type"],  # need to add to typespec
                 file_mode=headers["ocp-batch-file-mode"],
-            )
-
-        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal(  # type: ignore
-            job_id,
-            task_id,
-            file_path,
-            timeout=timeout,
-            ocpdate=ocpdate,
-            if_modified_since=if_modified_since,
-            if_unmodified_since=if_unmodified_since,
+            )
+
+        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal(  # type: ignore
+            job_id,
+            task_id,
+            file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
             cls=cls,
-            **kwargs)
+            **kwargs
+        )

         return get_response

@@ -523,9 +523,8 @@ def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add):
                     for task in chunk_tasks_to_add:
                         if task.id == task_result.task_id:
                             self.tasks_to_add.appendleft(task)
-                elif (
-                    task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR
-                    and not (task_result.error and task_result.error.code == "TaskExists")
+                elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not (
+                    task_result.error and task_result.error.code == "TaskExists"
                 ):
                     # Client error will be recorded unless Task already exists
                     self.failure_tasks.appendleft(task_result)
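Both file-properties overrides above build a `BatchFileProperties` out of response headers rather than a response body, handing the work to the `cls` deserialization callback that azure-core invokes as `cls(pipeline_response, deserialized, response_headers)`. A minimal sketch of such a callback, assuming the header names shown in the hunks; the function name is hypothetical:

```python
from azure.batch import models as _models

def _file_properties_from_headers(pipeline_response, deserialized, response_headers):
    # Read the ocp-* headers the Batch service returns on HEAD-style calls,
    # mirroring the construction in the patch above.
    headers = pipeline_response.http_response.headers
    return _models.BatchFileProperties(
        creation_time=headers["ocp-creation-time"],
        file_mode=headers["ocp-batch-file-mode"],
    )
```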
diff --git a/sdk/batch/azure-batch/azure/batch/_patch.py b/sdk/batch/azure-batch/azure/batch/_patch.py
index a9f1f6eeca2a..580a19463cac 100644
--- a/sdk/batch/azure-batch/azure/batch/_patch.py
+++ b/sdk/batch/azure-batch/azure/batch/_patch.py
@@ -146,7 +146,7 @@ class BatchClient(GenerateBatchClient):
     def __init__(self, endpoint: str, credential: Union[AzureNamedKeyCredential, TokenCredential], **kwargs):
         super().__init__(
             endpoint=endpoint,
-            credential=credential, # type: ignore
+            credential=credential,  # type: ignore
             authentication_policy=kwargs.pop(
                 "authentication_policy", self._format_shared_key_credential("", credential)
             ),
diff --git a/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py b/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py
index b3c12ac94cfb..a449b8db8a5d 100644
--- a/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py
+++ b/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py
@@ -89,9 +89,7 @@ async def create_tasks(
         kwargs.update({"timeout": timeout, "ocpdate": ocpdate})

         results_queue: Deque[_models.BatchTaskAddResult] = collections.deque()
-        task_workflow_manager = _TaskWorkflowManager(
-            self, job_id=job_id, task_collection=task_collection, **kwargs
-        )
+        task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs)

         if concurrencies:
             if concurrencies < 0:
@@ -230,18 +228,19 @@ async def get_node_file_properties(
                 creation_time=headers["ocp-creation-time"],
                 # content_type=headers["Content-Type"],  # need to add to typespec
                 file_mode=headers["ocp-batch-file-mode"],
-            )
-
-        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal(  # type: ignore
-            pool_id,
-            node_id,
-            file_path,
-            timeout=timeout,
-            ocpdate=ocpdate,
-            if_modified_since=if_modified_since,
-            if_unmodified_since=if_unmodified_since,
+            )
+
+        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal(  # type: ignore
+            pool_id,
+            node_id,
+            file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
             cls=cls,
-            **kwargs)
+            **kwargs
+        )

         return get_response

@@ -298,18 +297,19 @@ async def get_task_file_properties(
                 creation_time=headers["ocp-creation-time"],
                 # content_type=headers["Content-Type"],  # need to add to typespec
                 file_mode=headers["ocp-batch-file-mode"],
-            )
-
-        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal(  # type: ignore
-            job_id,
-            task_id,
-            file_path,
-            timeout=timeout,
-            ocpdate=ocpdate,
-            if_modified_since=if_modified_since,
-            if_unmodified_since=if_unmodified_since,
+            )
+
+        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal(  # type: ignore
+            job_id,
+            task_id,
+            file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
             cls=cls,
-            **kwargs)
+            **kwargs
+        )

         return get_response

@@ -473,7 +473,7 @@ async def _bulk_add_tasks(
                     self.tasks_to_add.extendleft(chunk_tasks_to_add[midpoint:])
                     await self._bulk_add_tasks(results_queue, chunk_tasks_to_add[:midpoint])
                 # Retry server side errors
-                elif 500 <= e.response.status_code <= 599: # type: ignore
+                elif 500 <= e.response.status_code <= 599:  # type: ignore
                     self.tasks_to_add.extendleft(chunk_tasks_to_add)
                 else:
                     # Re-add to pending queue as unknown status / don't have result
@@ -493,9 +493,8 @@ async def _bulk_add_tasks(
                     for task in chunk_tasks_to_add:
                         if task.id == task_result.task_id:
                             self.tasks_to_add.appendleft(task)
-                elif (
-                    task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR
-                    and not (task_result.error and task_result.error.code == "TaskExists")
+                elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not (
+                    task_result.error and task_result.error.code == "TaskExists"
                 ):
                     # Client error will be recorded unless Task already exists
                     self.failure_tasks.appendleft(task_result)
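The `_bulk_add_tasks` hunks above preserve the existing recovery strategy: a chunk rejected as too large is split at the midpoint and both halves are resubmitted, 5xx responses requeue the whole chunk, and client errors other than `TaskExists` are recorded as failures. A sketch of just the halving step, with a stand-in exception type in place of the real service error:

```python
class RequestBodyTooLarge(Exception):
    """Stand-in for the service's request-body-too-large error."""

async def add_with_halving(submit, chunk):
    """Submit a chunk of tasks, splitting it in half whenever it is too large."""
    try:
        await submit(chunk)
    except RequestBodyTooLarge:
        if len(chunk) <= 1:
            raise  # a single oversized task cannot be split any further
        midpoint = len(chunk) // 2
        await add_with_halving(submit, chunk[:midpoint])
        await add_with_halving(submit, chunk[midpoint:])
```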
diff --git a/sdk/batch/azure-batch/azure/batch/aio/_patch.py b/sdk/batch/azure-batch/azure/batch/aio/_patch.py
index 64a3f1262c22..082f6643f95a 100644
--- a/sdk/batch/azure-batch/azure/batch/aio/_patch.py
+++ b/sdk/batch/azure-batch/azure/batch/aio/_patch.py
@@ -41,7 +41,7 @@ class BatchClient(GenerateBatchClient):
     def __init__(self, endpoint: str, credential: Union[AzureNamedKeyCredential, TokenCredential], **kwargs):
         super().__init__(
             endpoint=endpoint,
-            credential=credential, # type: ignore
+            credential=credential,  # type: ignore
             authentication_policy=kwargs.pop("authentication_policy", self._format_shared_key_credential(credential)),
             **kwargs
         )
diff --git a/sdk/batch/azure-batch/azure/batch/models/__init__.py b/sdk/batch/azure-batch/azure/batch/models/__init__.py
index c54aed6d5845..393ee8aedeb0 100644
--- a/sdk/batch/azure-batch/azure/batch/models/__init__.py
+++ b/sdk/batch/azure-batch/azure/batch/models/__init__.py
@@ -123,7 +123,6 @@
     ExitConditions,
     ExitOptions,
     FileProperties,
-    GetCertificateResponse,
     HttpHeader,
     ImageReference,
     InboundEndpoint,
@@ -337,7 +336,6 @@
     "ExitConditions",
     "ExitOptions",
     "FileProperties",
-    "GetCertificateResponse",
     "HttpHeader",
     "ImageReference",
     "InboundEndpoint",
diff --git a/sdk/batch/azure-batch/azure/batch/models/_models.py b/sdk/batch/azure-batch/azure/batch/models/_models.py
index 842decf5de5c..8a70f83a8da1 100644
--- a/sdk/batch/azure-batch/azure/batch/models/_models.py
+++ b/sdk/batch/azure-batch/azure/batch/models/_models.py
@@ -11,14 +11,13 @@
 import datetime
 from typing import Any, Dict, List, Mapping, Optional, TYPE_CHECKING, Union, overload

-from .. import _model_base
-from .._model_base import rest_field
+from .._utils.model_base import Model as _Model, rest_field

 if TYPE_CHECKING:
     from .. import models as _models


-class AffinityInfo(_model_base.Model):
+class AffinityInfo(_Model):
     """A locality hint that can be used by the Batch service to select a Compute Node on which to
     start a Task.
@@ -54,7 +53,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)


-class AuthenticationTokenSettings(_model_base.Model):
+class AuthenticationTokenSettings(_Model):
     """The settings for an authentication token that the Task can use to perform Batch service
     operations.
@@ -91,7 +90,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)


-class AutomaticOsUpgradePolicy(_model_base.Model):
+class AutomaticOsUpgradePolicy(_Model):
     """The configuration parameters used for performing automatic OS upgrade.

     :ivar disable_automatic_rollback: Whether OS image rollback feature should be disabled.
@@ -154,7 +153,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)


-class AutoScaleRun(_model_base.Model):
+class AutoScaleRun(_Model):
     """The results and errors from an execution of a Pool autoscale formula.

     :ivar timestamp: The time at which the autoscale formula was last evaluated. Required.
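From this point on, the `_models.py` hunks repeat a single mechanical change: every model's base class moves from `_model_base.Model` to the `_Model` alias imported above, while the `rest_field` declarations stay as they were. A minimal sketch of the declarative shape all of these models share; the class, its fields, and the import path are illustrative assumptions, not part of the patch:

```python
from typing import Optional

# Assumed location of the shared model base after this change.
from azure.batch._utils.model_base import Model as _Model, rest_field

class ExampleResource(_Model):
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The resource name. Required."""
    url: Optional[str] = rest_field(visibility=["read"])
    """The read-only URL of the resource."""
```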
@@ -202,7 +201,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AutoScaleRunError(_model_base.Model): +class AutoScaleRunError(_Model): """An error that occurred when executing or evaluating a Pool autoscale formula. :ivar code: An identifier for the autoscale error. Codes are invariant and are intended to be @@ -246,7 +245,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AutoUserSpecification(_model_base.Model): +class AutoUserSpecification(_Model): """Specifies the options for the auto user that runs an Azure Batch Task. :ivar scope: The scope for the auto user. The default value is pool. If the pool is running @@ -293,7 +292,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AzureBlobFileSystemConfiguration(_model_base.Model): +class AzureBlobFileSystemConfiguration(_Model): """Information used to connect to an Azure Storage Container using Blobfuse. :ivar account_name: The Azure Storage Account name. Required. @@ -372,7 +371,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AzureFileShareConfiguration(_model_base.Model): +class AzureFileShareConfiguration(_Model): """Information used to connect to an Azure Fileshare. :ivar account_name: The Azure Storage account name. Required. @@ -431,7 +430,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchApplication(_model_base.Model): +class BatchApplication(_Model): """Contains information about an application in an Azure Batch Account. :ivar id: A string that uniquely identifies the application within the Account. Required. @@ -469,7 +468,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchApplicationPackageReference(_model_base.Model): +class BatchApplicationPackageReference(_Model): """A reference to an Package to be deployed to Compute Nodes. :ivar application_id: The ID of the application to deploy. When creating a pool, the package's @@ -516,7 +515,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchAutoPoolSpecification(_model_base.Model): +class BatchAutoPoolSpecification(_Model): """Specifies characteristics for a temporary 'auto pool'. The Batch service will create this auto Pool when the Job is submitted. @@ -583,7 +582,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchCertificate(_model_base.Model): +class BatchCertificate(_Model): """A Certificate that can be installed on Compute Nodes and can be used to authenticate operations on the machine. @@ -686,7 +685,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchCertificateReference(_model_base.Model): +class BatchCertificateReference(_Model): """A reference to a Certificate to be installed on Compute Nodes in a Pool. Warning: This object is deprecated and will be removed after February, 2024. Please use the `Azure KeyVault Extension `_ @@ -774,7 +773,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchError(_model_base.Model): +class BatchError(_Model): """An error response received from the Azure Batch service. :ivar code: An identifier for the error. 
Codes are invariant and are intended to be consumed @@ -820,7 +819,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchErrorDetail(_model_base.Model): +class BatchErrorDetail(_Model): """An item of additional information included in an Azure Batch error response. :ivar key: An identifier specifying the meaning of the Value property. @@ -853,7 +852,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchErrorMessage(_model_base.Model): +class BatchErrorMessage(_Model): """An error message received in an Azure Batch error response. :ivar lang: The language code of the error message. @@ -886,7 +885,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJob(_model_base.Model): +class BatchJob(_Model): """An Azure Batch Job. :ivar id: A string that uniquely identifies the Job within the Account. The ID is @@ -1113,7 +1112,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobConstraints(_model_base.Model): +class BatchJobConstraints(_Model): """The execution constraints for a Job. :ivar max_wall_clock_time: The maximum elapsed time that the Job may run, measured from the @@ -1169,7 +1168,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobCreateContent(_model_base.Model): +class BatchJobCreateContent(_Model): """Parameters for creating an Azure Batch Job. :ivar id: A string that uniquely identifies the Job within the Account. The ID can contain any @@ -1381,7 +1380,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobDisableContent(_model_base.Model): +class BatchJobDisableContent(_Model): """Parameters for disabling an Azure Batch Job. :ivar disable_tasks: What to do with active Tasks associated with the Job. Required. Known @@ -1413,7 +1412,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobExecutionInfo(_model_base.Model): +class BatchJobExecutionInfo(_Model): """Contains information about the execution of a Job in the Azure Batch service. :ivar start_time: The start time of the Job. This is the time at which the Job was created. @@ -1500,7 +1499,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobManagerTask(_model_base.Model): +class BatchJobManagerTask(_Model): """Specifies details of a Job Manager Task. The Job Manager Task is automatically started when the Job is created. The Batch service tries to schedule the Job Manager Task before any other Tasks in @@ -1762,7 +1761,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobNetworkConfiguration(_model_base.Model): +class BatchJobNetworkConfiguration(_Model): """The network configuration for the Job. :ivar subnet_id: The ARM resource identifier of the virtual network subnet which Compute Nodes @@ -1839,7 +1838,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobPreparationAndReleaseTaskStatus(_model_base.Model): +class BatchJobPreparationAndReleaseTaskStatus(_Model): """The status of the Job Preparation and Job Release Tasks on a Compute Node. :ivar pool_id: The ID of the Pool containing the Compute Node to which this entry refers. 
@@ -1896,7 +1895,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobPreparationTask(_model_base.Model): +class BatchJobPreparationTask(_Model): """A Job Preparation Task to run before any Tasks of the Job on any given Compute Node. You can use Job Preparation to prepare a Node to run Tasks for the Job. Activities commonly performed in Job Preparation include: Downloading common @@ -2086,7 +2085,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobPreparationTaskExecutionInfo(_model_base.Model): +class BatchJobPreparationTaskExecutionInfo(_Model): """Contains information about the execution of a Job Preparation Task on a Compute Node. @@ -2228,7 +2227,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobReleaseTask(_model_base.Model): +class BatchJobReleaseTask(_Model): """A Job Release Task to run on Job completion on any Compute Node where the Job has run. The Job Release Task runs when the Job ends, because of one of the following: The user calls the Terminate Job API, or the Delete Job API while the Job is @@ -2383,7 +2382,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobReleaseTaskExecutionInfo(_model_base.Model): +class BatchJobReleaseTaskExecutionInfo(_Model): """Contains information about the execution of a Job Release Task on a Compute Node. @@ -2494,7 +2493,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobSchedule(_model_base.Model): +class BatchJobSchedule(_Model): """A Job Schedule that allows recurring Jobs by specifying when to run Jobs and a specification used to create each Job. @@ -2622,7 +2621,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobScheduleConfiguration(_model_base.Model): +class BatchJobScheduleConfiguration(_Model): """The schedule according to which Jobs will be created. All times are fixed respective to UTC and are not impacted by daylight saving time. @@ -2726,7 +2725,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobScheduleCreateContent(_model_base.Model): +class BatchJobScheduleCreateContent(_Model): """Parameters for creating an Azure Batch Job Schedule. :ivar id: A string that uniquely identifies the schedule within the Account. The ID can contain @@ -2794,7 +2793,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobScheduleExecutionInfo(_model_base.Model): +class BatchJobScheduleExecutionInfo(_Model): """Contains information about Jobs that have been and will be run under a Job Schedule. @@ -2849,7 +2848,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobScheduleStatistics(_model_base.Model): +class BatchJobScheduleStatistics(_Model): """Resource usage statistics for a Job Schedule. :ivar url: The URL of the statistics. Required. @@ -3003,7 +3002,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobScheduleUpdateContent(_model_base.Model): +class BatchJobScheduleUpdateContent(_Model): """Parameters for updating an Azure Batch Job Schedule. :ivar schedule: The schedule according to which Jobs will be created. 
All times are fixed @@ -3057,7 +3056,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobSchedulingError(_model_base.Model): +class BatchJobSchedulingError(_Model): """An error encountered by the Batch service when scheduling a Job. :ivar category: The category of the Job scheduling error. Required. Known values are: @@ -3110,7 +3109,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobSpecification(_model_base.Model): +class BatchJobSpecification(_Model): """Specifies details of the Jobs to be created on a schedule. :ivar priority: The priority of Jobs created under this schedule. Priority values can range @@ -3315,7 +3314,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobStatistics(_model_base.Model): +class BatchJobStatistics(_Model): """Resource usage statistics for a Job. :ivar url: The URL of the statistics. Required. @@ -3460,7 +3459,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobTerminateContent(_model_base.Model): +class BatchJobTerminateContent(_Model): """Parameters for terminating an Azure Batch Job. :ivar termination_reason: The text you want to appear as the Job's TerminationReason. The @@ -3491,7 +3490,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchJobUpdateContent(_model_base.Model): +class BatchJobUpdateContent(_Model): """Parameters for updating an Azure Batch Job. :ivar priority: The priority of the Job. Priority values can range from -1000 to 1000, with @@ -3609,7 +3608,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNode(_model_base.Model): +class BatchNode(_Model): """A Compute Node in the Batch service. :ivar id: The ID of the Compute Node. Every Compute Node that is added to a Pool is assigned a @@ -3863,7 +3862,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeAgentInfo(_model_base.Model): +class BatchNodeAgentInfo(_Model): """The Batch Compute Node agent is a program that runs on each Compute Node in the Pool and provides Batch capability on the Compute Node. @@ -3908,7 +3907,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeCounts(_model_base.Model): +class BatchNodeCounts(_Model): """The number of Compute Nodes in each Compute Node state. :ivar creating: The number of Compute Nodes in the creating state. Required. @@ -4021,7 +4020,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeDeallocateContent(_model_base.Model): +class BatchNodeDeallocateContent(_Model): """Options for deallocating a Compute Node. :ivar node_deallocate_option: When to deallocate the Compute Node and what to do with currently @@ -4055,7 +4054,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeDisableSchedulingContent(_model_base.Model): +class BatchNodeDisableSchedulingContent(_Model): """Parameters for disabling scheduling on an Azure Batch Compute Node. 
:ivar node_disable_scheduling_option: What to do with currently running Tasks when disabling @@ -4089,7 +4088,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeEndpointConfiguration(_model_base.Model): +class BatchNodeEndpointConfiguration(_Model): """The endpoint configuration for the Compute Node. :ivar inbound_endpoints: The list of inbound endpoints that are accessible on the Compute Node. @@ -4120,7 +4119,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeError(_model_base.Model): +class BatchNodeError(_Model): """An error encountered by a Compute Node. :ivar code: An identifier for the Compute Node error. Codes are invariant and are intended to @@ -4164,7 +4163,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeFile(_model_base.Model): +class BatchNodeFile(_Model): """Information about a file or directory on a Compute Node. :ivar name: The file path. @@ -4211,7 +4210,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeIdentityReference(_model_base.Model): +class BatchNodeIdentityReference(_Model): """The reference to a user assigned identity associated with the Batch pool which a compute node will use. @@ -4242,7 +4241,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeInfo(_model_base.Model): +class BatchNodeInfo(_Model): """Information about the Compute Node on which a Task ran. :ivar affinity_id: An identifier for the Node on which the Task ran, which can be passed when @@ -4303,7 +4302,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodePlacementConfiguration(_model_base.Model): +class BatchNodePlacementConfiguration(_Model): """For regional placement, nodes in the pool will be allocated in the same region. For zonal placement, nodes in the pool will be spread across different zones with best effort balancing. @@ -4339,7 +4338,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeRebootContent(_model_base.Model): +class BatchNodeRebootContent(_Model): """Parameters for rebooting an Azure Batch Compute Node. :ivar node_reboot_option: When to reboot the Compute Node and what to do with currently running @@ -4373,7 +4372,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeReimageContent(_model_base.Model): +class BatchNodeReimageContent(_Model): """Parameters for reimaging an Azure Batch Compute Node. :ivar node_reimage_option: When to reimage the Compute Node and what to do with currently @@ -4407,7 +4406,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeRemoteLoginSettings(_model_base.Model): +class BatchNodeRemoteLoginSettings(_Model): """The remote login settings for a Compute Node. :ivar remote_login_ip_address: The IP address used for remote login to the Compute Node. @@ -4445,7 +4444,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeRemoveContent(_model_base.Model): +class BatchNodeRemoveContent(_Model): """Parameters for removing nodes from an Azure Batch Pool. 
:ivar node_list: A list containing the IDs of the Compute Nodes to be removed from the @@ -4499,7 +4498,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeUserCreateContent(_model_base.Model): +class BatchNodeUserCreateContent(_Model): """Parameters for creating a user account for RDP or SSH access on an Azure Batch Compute Node. :ivar name: The user name of the Account. Required. @@ -4565,7 +4564,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeUserUpdateContent(_model_base.Model): +class BatchNodeUserUpdateContent(_Model): """Parameters for updating a user account for RDP or SSH access on an Azure Batch Compute Node. :ivar password: The password of the Account. The password is required for Windows Compute @@ -4622,7 +4621,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchNodeVMExtension(_model_base.Model): +class BatchNodeVMExtension(_Model): """The configuration for virtual machine extension instance view. :ivar provisioning_state: The provisioning state of the virtual machine extension. @@ -4666,7 +4665,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPool(_model_base.Model): +class BatchPool(_Model): """A Pool in the Azure Batch service. :ivar id: A string that uniquely identifies the Pool within the Account. The ID can contain any @@ -4992,7 +4991,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolCreateContent(_model_base.Model): +class BatchPoolCreateContent(_Model): """Parameters for creating an Azure Batch Pool. :ivar id: A string that uniquely identifies the Pool within the Account. The ID can contain any @@ -5294,7 +5293,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolEnableAutoScaleContent(_model_base.Model): +class BatchPoolEnableAutoScaleContent(_Model): """Parameters for enabling automatic scaling on an Azure Batch Pool. :ivar auto_scale_formula: The formula for the desired number of Compute Nodes in the Pool. The @@ -5356,7 +5355,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolEndpointConfiguration(_model_base.Model): +class BatchPoolEndpointConfiguration(_Model): """The endpoint configuration for a Pool. :ivar inbound_nat_pools: A list of inbound NAT Pools that can be used to address specific ports @@ -5393,7 +5392,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolEvaluateAutoScaleContent(_model_base.Model): +class BatchPoolEvaluateAutoScaleContent(_Model): """Parameters for evaluating an automatic scaling formula on an Azure Batch Pool. :ivar auto_scale_formula: The formula for the desired number of Compute Nodes in the Pool. The @@ -5433,7 +5432,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolIdentity(_model_base.Model): +class BatchPoolIdentity(_Model): """The identity of the Batch pool, if configured. :ivar type: The identity of the Batch pool, if configured. The list of user identities @@ -5481,7 +5480,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolInfo(_model_base.Model): +class BatchPoolInfo(_Model): """Specifies how a Job should be assigned to a Pool. 
:ivar pool_id: The ID of an existing Pool. All the Tasks of the Job will run on the specified @@ -5538,7 +5537,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolNodeCounts(_model_base.Model): +class BatchPoolNodeCounts(_Model): """The number of Compute Nodes in each state for a Pool. :ivar pool_id: The ID of the Pool. Required. @@ -5580,7 +5579,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolReplaceContent(_model_base.Model): +class BatchPoolReplaceContent(_Model): """Parameters for replacing properties on an Azure Batch Pool. :ivar start_task: A Task to run on each Compute Node as it joins the Pool. The Task runs when @@ -5690,7 +5689,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolResizeContent(_model_base.Model): +class BatchPoolResizeContent(_Model): """Parameters for changing the size of an Azure Batch Pool. :ivar target_dedicated_nodes: The desired number of dedicated Compute Nodes in the Pool. @@ -5752,7 +5751,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolResourceStatistics(_model_base.Model): +class BatchPoolResourceStatistics(_Model): """Statistics related to resource consumption by Compute Nodes in a Pool. :ivar start_time: The start time of the time range covered by the statistics. Required. @@ -5871,7 +5870,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolSpecification(_model_base.Model): +class BatchPoolSpecification(_Model): """Specification for creating a new Pool. :ivar display_name: The display name for the Pool. The display name need not be unique and can @@ -6151,7 +6150,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolStatistics(_model_base.Model): +class BatchPoolStatistics(_Model): """Contains utilization and resource usage statistics for the lifetime of a Pool. :ivar url: The URL for the statistics. Required. @@ -6209,7 +6208,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolUpdateContent(_model_base.Model): +class BatchPoolUpdateContent(_Model): """Parameters for updating an Azure Batch Pool. :ivar display_name: The display name for the Pool. The display name need not be unique and can @@ -6439,7 +6438,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolUsageMetrics(_model_base.Model): +class BatchPoolUsageMetrics(_Model): """Usage metrics for a Pool across an aggregation interval. :ivar pool_id: The ID of the Pool whose metrics are aggregated in this entry. Required. @@ -6501,7 +6500,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchPoolUsageStatistics(_model_base.Model): +class BatchPoolUsageStatistics(_Model): """Statistics related to Pool usage information. :ivar start_time: The start time of the time range covered by the statistics. Required. @@ -6549,7 +6548,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchStartTask(_model_base.Model): +class BatchStartTask(_Model): """Batch will retry Tasks when a recovery operation is triggered on a Node. 
Examples of recovery operations include (but are not limited to) when an unhealthy Node is rebooted or a Compute Node disappeared due to host failure. @@ -6700,7 +6699,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchStartTaskInfo(_model_base.Model): +class BatchStartTaskInfo(_Model): """Information about a StartTask running on a Compute Node. :ivar state: The state of the StartTask on the Compute Node. Required. Known values are: @@ -6825,7 +6824,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchSubtask(_model_base.Model): +class BatchSubtask(_Model): """Information about an Azure Batch subtask. :ivar id: The ID of the subtask. @@ -6957,7 +6956,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchSupportedImage(_model_base.Model): +class BatchSupportedImage(_Model): """A reference to the Azure Virtual Machines Marketplace Image and additional information about the Image. @@ -7033,7 +7032,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTask(_model_base.Model): +class BatchTask(_Model): """Batch will retry Tasks when a recovery operation is triggered on a Node. Examples of recovery operations include (but are not limited to) when an unhealthy Node is rebooted or a Compute Node disappeared due to host failure. @@ -7297,7 +7296,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskAddCollectionResult(_model_base.Model): +class BatchTaskAddCollectionResult(_Model): """The result of adding a collection of Tasks to a Job. :ivar value: The results of the add Task collection operation. @@ -7327,7 +7326,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskAddResult(_model_base.Model): +class BatchTaskAddResult(_Model): """Result for a single Task added as part of an add Task collection operation. :ivar status: The status of the add Task request. Required. Known values are: "success", @@ -7392,7 +7391,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskConstraints(_model_base.Model): +class BatchTaskConstraints(_Model): """Execution constraints to apply to a Task. :ivar max_wall_clock_time: The maximum elapsed time that the Task may run, measured from the @@ -7460,7 +7459,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskContainerExecutionInfo(_model_base.Model): +class BatchTaskContainerExecutionInfo(_Model): """Contains information about the container which a Task is executing. :ivar container_id: The ID of the container. @@ -7506,7 +7505,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskContainerSettings(_model_base.Model): +class BatchTaskContainerSettings(_Model): """The container settings for a Task. :ivar container_run_options: Additional options to the container create command. These @@ -7580,7 +7579,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskCounts(_model_base.Model): +class BatchTaskCounts(_Model): """The Task counts for a Job. :ivar active: The number of Tasks in the active state. Required. 
@@ -7632,7 +7631,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskCountsResult(_model_base.Model): +class BatchTaskCountsResult(_Model): """The Task and TaskSlot counts for a Job. :ivar task_counts: The number of Tasks per state. Required. @@ -7669,7 +7668,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskCreateContent(_model_base.Model): +class BatchTaskCreateContent(_Model): """Parameters for creating an Azure Batch Task. :ivar id: A string that uniquely identifies the Task within the Job. The ID can contain any @@ -7905,7 +7904,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskDependencies(_model_base.Model): +class BatchTaskDependencies(_Model): """Specifies any dependencies of a Task. Any Task that is explicitly specified or within a dependency range must complete before the dependant Task will be scheduled. @@ -7954,7 +7953,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskExecutionInfo(_model_base.Model): +class BatchTaskExecutionInfo(_Model): """Information about the execution of a Task. :ivar start_time: The time at which the Task started running. 'Running' corresponds to the @@ -8094,7 +8093,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskFailureInfo(_model_base.Model): +class BatchTaskFailureInfo(_Model): """Information about a Task failure. :ivar category: The category of the Task error. Required. Known values are: "usererror" and @@ -8145,7 +8144,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskGroup(_model_base.Model): +class BatchTaskGroup(_Model): """A collection of Azure Batch Tasks to add. :ivar value: The collection of Tasks to add. The maximum count of Tasks is 100. The total @@ -8181,7 +8180,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskIdRange(_model_base.Model): +class BatchTaskIdRange(_Model): """The start and end of the range are inclusive. For example, if a range has start 9 and end 12, then it represents Tasks '9', '10', '11' and '12'. @@ -8215,7 +8214,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskInfo(_model_base.Model): +class BatchTaskInfo(_Model): """Information about a Task running on a Compute Node. :ivar task_url: The URL of the Task. @@ -8274,7 +8273,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskSchedulingPolicy(_model_base.Model): +class BatchTaskSchedulingPolicy(_Model): """Specifies how Tasks should be distributed across Compute Nodes. :ivar node_fill_type: How Tasks are distributed across Compute Nodes in a Pool. If not @@ -8306,7 +8305,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskSlotCounts(_model_base.Model): +class BatchTaskSlotCounts(_Model): """The TaskSlot counts for a Job. :ivar active: The number of TaskSlots for active Tasks. Required. @@ -8354,7 +8353,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BatchTaskStatistics(_model_base.Model): +class BatchTaskStatistics(_Model): """Resource usage statistics for a Task. :ivar url: The URL of the statistics. Required. 
@@ -8465,7 +8464,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CifsMountConfiguration(_model_base.Model): +class CifsMountConfiguration(_Model): """Information used to connect to a CIFS file system. :ivar username: The user to use for authentication against the CIFS file system. Required. @@ -8523,7 +8522,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ContainerConfiguration(_model_base.Model): +class ContainerConfiguration(_Model): """The configuration for container-enabled Pools. :ivar type: The container technology to be used. Required. Known values are: "dockerCompatible" @@ -8575,7 +8574,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ContainerHostBatchBindMountEntry(_model_base.Model): +class ContainerHostBatchBindMountEntry(_Model): """The entry of path and mount mode you want to mount into task container. :ivar source: The path which be mounted to container customer can select. Known values are: @@ -8621,7 +8620,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ContainerRegistryReference(_model_base.Model): +class ContainerRegistryReference(_Model): """A private container registry. :ivar username: The user name to log into the registry server. @@ -8670,7 +8669,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataDisk(_model_base.Model): +class DataDisk(_Model): """Settings which will be used by the data disks associated to Compute Nodes in the Pool. When using attached data disks, you need to mount and format the disks from within a VM to use them. @@ -8734,7 +8733,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DeleteBatchCertificateError(_model_base.Model): +class DeleteBatchCertificateError(_Model): """An error encountered by the Batch service when deleting a Certificate. :ivar code: An identifier for the Certificate deletion error. Codes are invariant and are @@ -8784,7 +8783,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DiffDiskSettings(_model_base.Model): +class DiffDiskSettings(_Model): """Specifies the ephemeral Disk Settings for the operating system disk used by the compute node (VM). @@ -8833,7 +8832,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DiskEncryptionConfiguration(_model_base.Model): +class DiskEncryptionConfiguration(_Model): """The disk encryption configuration applied on compute nodes in the pool. Disk encryption configuration is not supported on Linux pool created with Azure Compute Gallery Image. @@ -8867,7 +8866,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class EnvironmentSetting(_model_base.Model): +class EnvironmentSetting(_Model): """An environment variable to be set on a Task process. :ivar name: The name of the environment variable. Required. @@ -8900,7 +8899,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ExitCodeMapping(_model_base.Model): +class ExitCodeMapping(_Model): """How the Batch service should respond if a Task exits with a particular exit code. 
@@ -8937,7 +8936,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ExitCodeRangeMapping(_model_base.Model): +class ExitCodeRangeMapping(_Model): """A range of exit codes and how the Batch service should respond to exit codes within that range. @@ -8980,7 +8979,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ExitConditions(_model_base.Model): +class ExitConditions(_Model): """Specifies how the Batch service should respond when the Task completes. :ivar exit_codes: A list of individual Task exit codes and how the Batch service should respond @@ -9053,7 +9052,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ExitOptions(_model_base.Model): +class ExitOptions(_Model): """Specifies how the Batch service responds to a particular exit condition. :ivar job_action: An action to take on the Job containing the Task, if the Task completes with @@ -9107,7 +9106,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class FileProperties(_model_base.Model): +class FileProperties(_Model): """The properties of a file on a Compute Node. :ivar creation_time: The file creation time. The creation time is not returned for files on @@ -9166,89 +9165,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class GetCertificateResponse(_model_base.Model): - """GetCertificateResponse. - - :ivar thumbprint: The X.509 thumbprint of the Certificate. This is a sequence of up to 40 hex - digits (it may include spaces but these are removed). Required. - :vartype thumbprint: str - :ivar thumbprint_algorithm: The algorithm used to derive the thumbprint. This must be sha1. - Required. - :vartype thumbprint_algorithm: str - :ivar url: The URL of the Certificate. - :vartype url: str - :ivar state: The state of the Certificate. Known values are: "active", "deleting", and - "deletefailed". - :vartype state: str or ~azure.batch.models.BatchCertificateState - :ivar state_transition_time: The time at which the Certificate entered its current state. - :vartype state_transition_time: ~datetime.datetime - :ivar previous_state: The previous state of the Certificate. This property is not set if the - Certificate is in its initial active state. Known values are: "active", "deleting", and - "deletefailed". - :vartype previous_state: str or ~azure.batch.models.BatchCertificateState - :ivar previous_state_transition_time: The time at which the Certificate entered its previous - state. This property is not set if the Certificate is in its initial Active state. - :vartype previous_state_transition_time: ~datetime.datetime - :ivar public_data: The public part of the Certificate as a base-64 encoded .cer file. - :vartype public_data: str - :ivar delete_certificate_error: The error that occurred on the last attempt to delete this - Certificate. This property is set only if the Certificate is in the DeleteFailed state. - :vartype delete_certificate_error: ~azure.batch.models.DeleteBatchCertificateError - """ - - thumbprint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The X.509 thumbprint of the Certificate. This is a sequence of up to 40 hex digits (it may - include spaces but these are removed). 
Required.""" - thumbprint_algorithm: str = rest_field( - name="thumbprintAlgorithm", visibility=["read", "create", "update", "delete", "query"] - ) - """The algorithm used to derive the thumbprint. This must be sha1. Required.""" - url: Optional[str] = rest_field(visibility=["read"]) - """The URL of the Certificate.""" - state: Optional[Union[str, "_models.BatchCertificateState"]] = rest_field(visibility=["read"]) - """The state of the Certificate. Known values are: \"active\", \"deleting\", and \"deletefailed\".""" - state_transition_time: Optional[datetime.datetime] = rest_field( - name="stateTransitionTime", visibility=["read"], format="rfc3339" - ) - """The time at which the Certificate entered its current state.""" - previous_state: Optional[Union[str, "_models.BatchCertificateState"]] = rest_field( - name="previousState", visibility=["read"] - ) - """The previous state of the Certificate. This property is not set if the Certificate is in its - initial active state. Known values are: \"active\", \"deleting\", and \"deletefailed\".""" - previous_state_transition_time: Optional[datetime.datetime] = rest_field( - name="previousStateTransitionTime", visibility=["read"], format="rfc3339" - ) - """The time at which the Certificate entered its previous state. This property is not set if the - Certificate is in its initial Active state.""" - public_data: Optional[str] = rest_field(name="publicData", visibility=["read"]) - """The public part of the Certificate as a base-64 encoded .cer file.""" - delete_certificate_error: Optional["_models.DeleteBatchCertificateError"] = rest_field( - name="deleteCertificateError", visibility=["read"] - ) - """The error that occurred on the last attempt to delete this Certificate. This property is set - only if the Certificate is in the DeleteFailed state.""" - - @overload - def __init__( - self, - *, - thumbprint: str, - thumbprint_algorithm: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class HttpHeader(_model_base.Model): +class HttpHeader(_Model): """An HTTP header name-value pair. :ivar name: The case-insensitive name of the header to be used while uploading output files. @@ -9282,7 +9199,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ImageReference(_model_base.Model): +class ImageReference(_Model): """A reference to an Azure Virtual Machines Marketplace Image or a Azure Compute Gallery Image. To get the list of all Azure Marketplace Image references verified by Azure Batch, see the ' List Supported Images ' operation. @@ -9391,7 +9308,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InboundEndpoint(_model_base.Model): +class InboundEndpoint(_Model): """An inbound endpoint on a Compute Node. :ivar name: The name of the endpoint. Required. @@ -9448,7 +9365,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InboundNatPool(_model_base.Model): +class InboundNatPool(_Model): """A inbound NAT Pool that can be used to address specific ports on Compute Nodes in a Batch Pool externally. 
@@ -9548,7 +9465,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InstanceViewStatus(_model_base.Model): +class InstanceViewStatus(_Model): """The instance view status. :ivar code: The status code. @@ -9602,7 +9519,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class LinuxUserConfiguration(_model_base.Model): +class LinuxUserConfiguration(_Model): """Properties used to create a user Account on a Linux Compute Node. :ivar uid: The user ID of the user Account. The uid and gid properties must be specified @@ -9657,7 +9574,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ManagedDisk(_model_base.Model): +class ManagedDisk(_Model): """The managed disk parameters. :ivar storage_account_type: The storage account type for managed disk. Known values are: @@ -9696,7 +9613,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MetadataItem(_model_base.Model): +class MetadataItem(_Model): """The Batch service does not assign any meaning to this metadata; it is solely for the use of user code. @@ -9730,7 +9647,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MountConfiguration(_model_base.Model): +class MountConfiguration(_Model): """The file system to mount on each node. :ivar azure_blob_file_system_configuration: The Azure Storage Container to mount using blob @@ -9790,7 +9707,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MultiInstanceSettings(_model_base.Model): +class MultiInstanceSettings(_Model): """Multi-instance Tasks are commonly used to support MPI Tasks. In the MPI case, if any of the subtasks fail (for example due to exiting with a non-zero exit code) the entire multi-instance Task fails. The multi-instance Task is then @@ -9860,7 +9777,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class NameValuePair(_model_base.Model): +class NameValuePair(_Model): """Represents a name-value pair. :ivar name: The name in the name-value pair. @@ -9893,7 +9810,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class NetworkConfiguration(_model_base.Model): +class NetworkConfiguration(_Model): """The network configuration for a Pool. :ivar subnet_id: The ARM resource identifier of the virtual network subnet which the Compute @@ -9994,7 +9911,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class NetworkSecurityGroupRule(_model_base.Model): +class NetworkSecurityGroupRule(_Model): """A network security group rule to apply to an inbound endpoint. :ivar priority: The priority for this rule. Priorities within a Pool must be unique and are @@ -10068,7 +9985,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class NfsMountConfiguration(_model_base.Model): +class NfsMountConfiguration(_Model): """Information used to connect to an NFS file system. :ivar source: The URI of the file system to mount. Required. @@ -10116,7 +10033,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class OSDisk(_model_base.Model): +class OSDisk(_Model): """Settings for the operating system disk of the compute node (VM). 
:ivar ephemeral_os_disk_settings: Specifies the ephemeral Disk Settings for the operating @@ -10181,7 +10098,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class OutputFile(_model_base.Model): +class OutputFile(_Model): """On every file uploads, Batch service writes two log files to the compute node, 'fileuploadout.txt' and 'fileuploaderr.txt'. These log files are used to learn more about a specific failure. @@ -10252,7 +10169,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class OutputFileBlobContainerDestination(_model_base.Model): +class OutputFileBlobContainerDestination(_Model): """Specifies a file upload destination within an Azure blob storage container. :ivar path: The destination blob or virtual directory within the Azure Storage container. If @@ -10324,7 +10241,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class OutputFileDestination(_model_base.Model): +class OutputFileDestination(_Model): """The destination to which a file should be uploaded. :ivar container: A location in Azure blob storage to which files are uploaded. @@ -10354,7 +10271,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class OutputFileUploadConfig(_model_base.Model): +class OutputFileUploadConfig(_Model): """Options for an output file upload operation, including under what conditions to perform the upload. @@ -10389,7 +10306,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class PublicIpAddressConfiguration(_model_base.Model): +class PublicIpAddressConfiguration(_Model): """The public IP Address configuration of the networking configuration of a Pool. :ivar ip_address_provisioning_type: The provisioning type for Public IP Addresses for the Pool. @@ -10439,7 +10356,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class RecentBatchJob(_model_base.Model): +class RecentBatchJob(_Model): """Information about the most recent Job to run under the Job Schedule. :ivar id: The ID of the Job. @@ -10472,7 +10389,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ResizeError(_model_base.Model): +class ResizeError(_Model): """An error that occurred when resizing a Pool. :ivar code: An identifier for the Pool resize error. Codes are invariant and are intended to be @@ -10516,7 +10433,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ResourceFile(_model_base.Model): +class ResourceFile(_Model): """A single file or multiple files to be downloaded to a Compute Node. :ivar auto_storage_container_name: The storage container name in the auto storage Account. The @@ -10636,7 +10553,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class RollingUpgradePolicy(_model_base.Model): +class RollingUpgradePolicy(_Model): """The configuration parameters used while performing a rolling upgrade. :ivar enable_cross_zone_upgrade: Allow VMSS to ignore AZ boundaries when constructing upgrade @@ -10747,7 +10664,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SecurityProfile(_model_base.Model): +class SecurityProfile(_Model): """Specifies the security profile settings for the virtual machine or virtual machine scale set. 
:ivar encryption_at_host: This property can be used by user in the request to enable or disable @@ -10810,7 +10727,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ServiceArtifactReference(_model_base.Model): +class ServiceArtifactReference(_Model): """Specifies the service artifact reference id used to set same image version for all virtual machines in the scale set when using 'latest' image version. @@ -10845,7 +10762,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UefiSettings(_model_base.Model): +class UefiSettings(_Model): """Specifies the security settings like secure boot and vTPM used while creating the virtual machine. @@ -10884,7 +10801,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UpgradePolicy(_model_base.Model): +class UpgradePolicy(_Model): """Describes an upgrade policy - automatic, manual, or rolling. :ivar mode: Specifies the mode of an upgrade to virtual machines in the scale set.

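The hunks above and below only swap the generated models' base class from `_model_base.Model` to the re-exported `_Model`; construction and field access are unchanged. A minimal, hypothetical sketch of the pattern, not part of the patch, assuming `MetadataItem` keeps its `name`/`value` fields and the base class stays mapping-backed as in prior generations:

    from azure.batch import models

    # Generated models are constructed from keyword arguments...
    item = models.MetadataItem(name="department", value="finance")
    assert item.name == "department"

    # ...and, assuming the base class remains a MutableMapping, they
    # round-trip to JSON-shaped dicts without an explicit serialize step:
    print(dict(item))  # {'name': 'department', 'value': 'finance'}
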
@@ -10939,7 +10856,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UploadBatchServiceLogsContent(_model_base.Model): +class UploadBatchServiceLogsContent(_Model): """The Azure Batch service log files upload parameters for a Compute Node. :ivar container_url: The URL of the container within Azure Blob Storage to which to upload the @@ -11013,7 +10930,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UploadBatchServiceLogsResult(_model_base.Model): +class UploadBatchServiceLogsResult(_Model): """The result of uploading Batch service log files from a specific Compute Node. :ivar virtual_directory_name: The virtual directory within Azure Blob Storage container to @@ -11055,7 +10972,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UserAccount(_model_base.Model): +class UserAccount(_Model): """Properties used to create a user used to execute Tasks on an Azure Batch Compute Node. @@ -11121,7 +11038,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UserAssignedIdentity(_model_base.Model): +class UserAssignedIdentity(_Model): """The user assigned Identity. :ivar resource_id: The ARM resource id of the user assigned identity. Required. @@ -11157,7 +11074,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UserIdentity(_model_base.Model): +class UserIdentity(_Model): """The definition of the user identity under which the Task is run. Specify either the userName or autoUser property, but not both. @@ -11197,7 +11114,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VirtualMachineConfiguration(_model_base.Model): +class VirtualMachineConfiguration(_Model): """The configuration for Compute Nodes in a Pool based on the Azure Virtual Machines infrastructure. @@ -11375,7 +11292,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VirtualMachineInfo(_model_base.Model): +class VirtualMachineInfo(_Model): """Info about the current state of the virtual machine. :ivar image_reference: The reference to the Azure Virtual Machine's Marketplace Image. @@ -11415,7 +11332,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VMDiskSecurityProfile(_model_base.Model): +class VMDiskSecurityProfile(_Model): """Specifies the security profile settings for the managed disk. **Note**: It can only be set for Confidential VMs and required when using Confidential VMs. @@ -11453,7 +11370,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VMExtension(_model_base.Model): +class VMExtension(_Model): """The configuration for virtual machine extensions. :ivar name: The name of the virtual machine extension. Required. @@ -11541,7 +11458,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VMExtensionInstanceView(_model_base.Model): +class VMExtensionInstanceView(_Model): """The vm extension instance view. :ivar name: The name of the vm extension instance view. @@ -11583,7 +11500,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class WindowsConfiguration(_model_base.Model): +class WindowsConfiguration(_Model): """Windows operating system settings to apply to the virtual machine. 
:ivar enable_automatic_updates: Whether automatic updates are enabled on the virtual machine. @@ -11615,7 +11532,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class WindowsUserConfiguration(_model_base.Model): +class WindowsUserConfiguration(_Model): """Properties used to create a user Account on a Windows Compute Node. :ivar login_mode: The login mode for the user. The default is 'batch'. Known values are: diff --git a/sdk/batch/azure-batch/azure/batch/models/_patch.py b/sdk/batch/azure-batch/azure/batch/models/_patch.py index 6435ce16c022..aeb7eed86afc 100644 --- a/sdk/batch/azure-batch/azure/batch/models/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/models/_patch.py @@ -19,6 +19,7 @@ "BatchFileProperties", ] # Add all objects you want publicly available to users at this package level + class CreateTasksError(HttpResponseError): """Aggregate Exception containing details for any failures from a task add operation. @@ -62,8 +63,8 @@ def __init__(self, pending_tasks=[], failure_tasks=[], errors=[]): ) super(CreateTasksError, self).__init__(self.message) -class BatchFileProperties: +class BatchFileProperties: """Information about a file or directory on a Compute Node with additional properties. :ivar url: The URL of the file. @@ -84,7 +85,7 @@ class BatchFileProperties: :vartype file_mode: str """ - url: Optional[str] + url: Optional[str] """The URL of the file.""" is_directory: Optional[bool] """Whether the object represents a directory.""" @@ -119,6 +120,7 @@ def __init__( self.content_type = content_type self.file_mode = file_mode + def patch_sdk(): """Do not remove from this file. diff --git a/sdk/batch/azure-batch/client/__init__.py b/sdk/batch/azure-batch/client/__init__.py new file mode 100644 index 000000000000..a02f2bbd5c47 --- /dev/null +++ b/sdk/batch/azure-batch/client/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import BatchClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/_client.py b/sdk/batch/azure-batch/client/_client.py new file mode 100644 index 000000000000..787ba5643578 --- /dev/null +++ b/sdk/batch/azure-batch/client/_client.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
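The new `BatchClient` defined below takes the Batch account endpoint and any `azure.core` `TokenCredential`, and defaults to api-version "2024-07-01.20.0". A minimal usage sketch, not part of the patch, assuming `azure-identity` is installed, the placeholder endpoint is replaced with a real account, and the client is re-exported as `azure.batch.BatchClient` as in the published package:

    from azure.identity import DefaultAzureCredential
    from azure.batch import BatchClient

    # Hypothetical endpoint, matching the form in the docstring below.
    client = BatchClient(
        endpoint="https://batchaccount.eastus2.batch.azure.com",
        credential=DefaultAzureCredential(),
    )

    # The client is a context manager (see __enter__/__exit__ below).
    with client:
        ...  # issue operations, or raw requests via client.send_request
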
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import BatchClientConfiguration +from ._operations._operations import _BatchClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class BatchClient(_BatchClientOperationsMixin): + """BatchClient. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = BatchClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + kwargs["request_id_header_name"] = "client-request-id" + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/batch/azure-batch/client/_configuration.py b/sdk/batch/azure-batch/client/_configuration.py new file mode 100644 index 000000000000..556c0855eefa --- /dev/null +++ b/sdk/batch/azure-batch/client/_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class BatchClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BatchClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-07-01.20.0") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://batch.core.windows.net//.default"]) + kwargs.setdefault("sdk_moniker", "batch/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/batch/azure-batch/client/_operations/__init__.py b/sdk/batch/azure-batch/client/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/_operations/_operations.py b/sdk/batch/azure-batch/client/_operations/_operations.py new file mode 100644 index 000000000000..7c616e00b417 --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/_operations.py @@ -0,0 +1,11483 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
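Each `build_batch_*_request` helper that follows assembles an endpoint-relative `HttpRequest`: `timeout` becomes the `timeOut` query parameter, and the `etag`/`match_condition` pair is turned into `If-Match`/`If-None-Match` headers via `prep_if_match`/`prep_if_none_match`. A hypothetical illustration reusing the `client` from the earlier sketch; the helpers live in an underscore module (the import path below follows this patch's tree), so calling them directly is for demonstration only:

    from azure.core import MatchConditions
    from client._operations._operations import build_batch_get_pool_request

    # Only fetch the pool if it changed relative to a cached ETag.
    request = build_batch_get_pool_request(
        pool_id="mypool",
        timeout=30,                                  # sent as ?timeOut=30
        etag='"0x8D4EDFEBFADF4AB"',                  # hypothetical ETag value
        match_condition=MatchConditions.IfModified,  # emitted as If-None-Match
    )
    response = client.send_request(request)  # formats {endpoint} into the URL
    if response.status_code == 304:
        print("pool unchanged; reuse the cached copy")
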
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +import datetime +import json +from typing import Any, Callable, Dict, Iterator, List, Optional, TypeVar +import urllib.parse + +from azure.core import MatchConditions, PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from ...azure.batch import models as _azure_batch_models4 +from .._configuration import BatchClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC, prep_if_match, prep_if_none_match + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_batch_list_applications_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/applications" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_application_request( + application_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/applications/{applicationId}" + path_format_arguments = { + "applicationId": _SERIALIZER.url("application_id", application_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", 
ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pool_usage_metrics_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/poolusagemetrics" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if starttime is not None: + _params["startTime"] = _SERIALIZER.query("starttime", starttime, "iso-8601") + if endtime is not None: + _params["endtime"] = _SERIALIZER.query("endtime", endtime, "iso-8601") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_pool_request( + *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pools_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools" + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_pool_exists_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": 
_SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_pool_request( + pool_id: str, + *, + timeout: 
Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/disableautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_enable_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: 
Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/enableautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_evaluate_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/evaluateautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_resize_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) 
-> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/resize" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_stop_pool_resize_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/stopresize" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not 
None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_pool_properties_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/updateproperties" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_remove_nodes_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/removenodes" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + 
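    # The assembled request is still endpoint-relative ("/pools/{poolId}/removenodes",
    # with poolId already substituted); BatchClient.send_request formats the
    # "{endpoint}" host in before the request goes on the wire. The body, when
    # provided, travels through **kwargs (e.g. content=...) into HttpRequest.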
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_supported_images_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/supportedimages" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pool_node_counts_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/nodecounts" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + 
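    # Every builder follows the same recipe from here: query parameters first
    # (api-version, the optional timeOut and, for job deletion, the optional
    # force flag), then headers (ocp-date plus any conditional
    # If-Modified-Since / If-Unmodified-Since / If-Match / If-None-Match).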
+ # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: 
Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_replace_job_request(
+    job_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_disable_job_request(
+    job_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/disable"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_enable_job_request(
+    job_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/enable"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_terminate_job_request(
+    job_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    force: Optional[bool] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/terminate"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if force is not None:
+        _params["force"] = _SERIALIZER.query("force", force, "bool")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
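+# Note: builders that carry a request body (create/update/replace) require the
+# caller to pass content_type via kwargs; the body itself is attached to the
+# returned HttpRequest by the calling operation, not here.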
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_jobs_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_jobs_from_schedule_request( # pylint: disable=name-too-long + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}/jobs" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = 
_SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_job_preparation_and_release_task_status_request( # pylint: disable=name-too-long + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/jobpreparationandreleasetaskstatus" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_task_counts_request( + job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/taskcounts" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_certificate_request( + *, timeout: Optional[int] = None, ocpdate: 
+def build_batch_create_certificate_request(
+    *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/certificates"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_list_certificates_request(
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    max_results: Optional[int] = None,
+    filter: Optional[str] = None,
+    select: Optional[List[str]] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/certificates"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if max_results is not None:
+        _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
+    if filter is not None:
+        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
+    if select is not None:
+        _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_cancel_certificate_deletion_request(  # pylint: disable=name-too-long
+    thumbprint_algorithm: str,
+    thumbprint: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})/canceldelete"
+    path_format_arguments = {
+        "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"),
+        "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_delete_certificate_request(
+    thumbprint_algorithm: str,
+    thumbprint: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})"
+    path_format_arguments = {
+        "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"),
+        "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_get_certificate_request(
+    thumbprint_algorithm: str,
+    thumbprint: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    select: Optional[List[str]] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})"
+    path_format_arguments = {
+        "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"),
+        "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if select is not None:
+        _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
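+# Existence checks are issued as HEAD requests; etag/match_condition are
+# translated into If-Match / If-None-Match by prep_if_match and
+# prep_if_none_match.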
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if 
if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: 
+def build_batch_disable_job_schedule_request(
+    job_schedule_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobschedules/{jobScheduleId}/disable"
+    path_format_arguments = {
+        "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_enable_job_schedule_request(
+    job_schedule_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobschedules/{jobScheduleId}/enable"
+    path_format_arguments = {
+        "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_terminate_job_schedule_request(  # pylint: disable=name-too-long
+    job_schedule_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    force: Optional[bool] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobschedules/{jobScheduleId}/terminate"
+    path_format_arguments = {
+        "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if force is not None:
+        _params["force"] = _SERIALIZER.query("force", force, "bool")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_create_job_schedule_request(
+    *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobschedules"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_list_job_schedules_request(
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    max_results: Optional[int] = None,
+    filter: Optional[str] = None,
+    select: Optional[List[str]] = None,
+    expand: Optional[List[str]] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobschedules"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if max_results is not None:
+        _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
+    if filter is not None:
+        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
+    if select is not None:
+        _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
+    if expand is not None:
+        _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_create_task_request(
+    job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: str = kwargs.pop("content_type")
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
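+# List builders expose the OData query options: $filter as a raw expression,
+# and $select/$expand as string lists serialized comma-separated (div=",");
+# maxresults caps the number of items returned per page.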
_SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_task_collection_request( # pylint: disable=name-too-long + job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/addtaskcollection" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] 
= _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: 
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_sub_tasks_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/subtasksinfo" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_terminate_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any 
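+# Task state actions (terminate/reactivate) are body-less POSTs that honor the
+# same optional timestamp and ETag preconditions as the job-level actions.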
+def build_batch_terminate_task_request(
+    job_id: str,
+    task_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/terminate"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_reactivate_task_request(
+    job_id: str,
+    task_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    etag: Optional[str] = None,
+    match_condition: Optional[MatchConditions] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/reactivate"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+    if_match = prep_if_match(etag, match_condition)
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if_none_match = prep_if_none_match(etag, match_condition)
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_delete_task_file_request(
+    job_id: str,
+    task_id: str,
+    file_path: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    recursive: Optional[bool] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+        "filePath": _SERIALIZER.url("file_path", file_path, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if recursive is not None:
+        _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_get_task_file_request(
+    job_id: str,
+    task_id: str,
+    file_path: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    ocp_range: Optional[str] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/octet-stream")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+        "filePath": _SERIALIZER.url("file_path", file_path, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    if ocp_range is not None:
+        _headers["ocp-range"] = _SERIALIZER.header("ocp_range", ocp_range, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_get_task_file_properties_request(  # pylint: disable=name-too-long
+    job_id: str,
+    task_id: str,
+    file_path: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    if_modified_since: Optional[datetime.datetime] = None,
+    if_unmodified_since: Optional[datetime.datetime] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+        "filePath": _SERIALIZER.url("file_path", file_path, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    if if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_batch_list_task_files_request(
+    job_id: str,
+    task_id: str,
+    *,
+    timeout: Optional[int] = None,
+    ocpdate: Optional[datetime.datetime] = None,
+    max_results: Optional[int] = None,
+    filter: Optional[str] = None,
+    recursive: Optional[bool] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/jobs/{jobId}/tasks/{taskId}/files"
+    path_format_arguments = {
+        "jobId": _SERIALIZER.url("job_id", job_id, "str"),
+        "taskId": _SERIALIZER.url("task_id", task_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    if timeout is not None:
+        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
+    if max_results is not None:
+        _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
+    if filter is not None:
+        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
+    if recursive is not None:
+        _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool")
+
+    # Construct headers
+    if ocpdate is not None:
+        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
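+# The remaining builders operate on compute nodes within a pool; node-user
+# requests are scoped by both poolId and nodeId in the URL.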
None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_node_user_request( + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users/{userName}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "userName": _SERIALIZER.url("user_name", user_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_node_user_request( + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users/{userName}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "userName": _SERIALIZER.url("user_name", user_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
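# NOTE: a minimal usage sketch, not generated output (IDs and body are illustrative): + #     request = build_batch_replace_node_user_request( + #         "pool-1", "node-1", "task-admin", + #         content_type="application/json; odata=minimalmetadata", + #         content=b'{"password": "<redacted>"}',  # extra kwargs are forwarded to HttpRequest + #     ) + #     response = client.send_request(request)  # assumes a configured BatchClient + # The build_* helpers only assemble an azure.core.rest.HttpRequest; sending it + # and handling the response stays with the client pipeline.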
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_reboot_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/reboot" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_start_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) 
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/start" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_reimage_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/reimage" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_deallocate_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/deallocate" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = 
_SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_node_scheduling_request( # pylint: disable=name-too-long + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/disablescheduling" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_enable_node_scheduling_request( # pylint: disable=name-too-long + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/enablescheduling" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_remote_login_settings_request( # pylint: disable=name-too-long + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/pools/{poolId}/nodes/{nodeId}/remoteloginsettings" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_upload_node_logs_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/uploadbatchservicelogs" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_nodes_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, 
"rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_extension_request( + pool_id: str, + node_id: str, + extension_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/extensions/{extensionName}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "extensionName": _SERIALIZER.url("extension_name", extension_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_node_extensions_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/extensions" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_node_file_request( + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params 
= case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/files/{filePath}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if recursive is not None: + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_file_request( + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/octet-stream") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/files/{filePath}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if ocp_range is not None: + _headers["ocp-range"] = _SERIALIZER.header("ocp_range", ocp_range, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_file_properties_request( # pylint: disable=name-too-long + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
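# api_version resolution order: explicit api_version kwarg, then a caller-seeded + # "api-version" query parameter, then the hard-coded service default.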
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/files/{filePath}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_node_files_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/files" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if recursive is not None: + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class _BatchClientOperationsMixin( # pylint: disable=too-many-public-methods + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], BatchClientConfiguration] +): + + @distributed_trace + def list_applications( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchApplication"]: + """Lists all of the applications available in the specified Account. 
+ + This operation returns only Applications and versions that are available for + use on Compute Nodes; that is, that can be used in a Package reference. For + administrator information about applications and versions that are not yet + available to Compute Nodes, use the Azure portal or the Azure Resource Manager + API. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :return: An iterator like instance of BatchApplication + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchApplication] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchApplication]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_applications_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchApplication], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response,
error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_application( + self, + application_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchApplication: + """Gets information about the specified Application. + + This operation returns only Applications and versions that are available for + use on Compute Nodes; that is, that can be used in a Package reference. For + administrator information about Applications and versions that are not yet + available to Compute Nodes, use the Azure portal or the Azure Resource Manager + API. + + :param application_id: The ID of the Application. Required. + :type application_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchApplication. The BatchApplication is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchApplication + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchApplication] = kwargs.pop("cls", None) + + _request = build_batch_get_application_request( + application_id=application_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() +
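# stream=True: hand the caller the raw byte iterator; skip model deserialization.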
else: + deserialized = _deserialize(_azure_batch_models4.BatchApplication, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_pool_usage_metrics( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchPoolUsageMetrics"]: + """Lists the usage metrics, aggregated by Pool across individual time intervals, + for the specified Account. + + If you do not specify a $filter clause including a poolId, the response + includes all Pools that existed in the Account in the time range of the + returned aggregation intervals. If you do not specify a $filter clause + including a startTime or endTime these filters default to the start and end + times of the last aggregation interval currently available; that is, only the + last aggregation interval is returned. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + results can be returned. Default value is None. + :paramtype max_results: int + :keyword starttime: The earliest time from which to include metrics. This must be at least two + and + a half hours before the current time. If not specified this defaults to the + start time of the last aggregation interval currently available. Default value is None. + :paramtype starttime: ~datetime.datetime + :keyword endtime: The latest time from which to include metrics. This must be at least two + hours + before the current time. If not specified this defaults to the end time of the + last aggregation interval currently available. Default value is None. + :paramtype endtime: ~datetime.datetime + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics + `_. + Default value is None.
+ :paramtype filter: str + :return: An iterator like instance of BatchPoolUsageMetrics + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchPoolUsageMetrics] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchPoolUsageMetrics]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_usage_metrics_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + starttime=starttime, + endtime=endtime, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchPoolUsageMetrics], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_pool( # pylint: disable=inconsistent-return-statements + self, + pool: _azure_batch_models4.BatchPoolCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Pool in the specified Account. + + When naming Pools, avoid including sensitive information such as user names or + secret project names. This information may appear in telemetry logs accessible + to Microsoft Support engineers. + + :param pool: The Pool to be created. Required.
+ :type pool: ~azure.batch.models.BatchPoolCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_pool_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_pools( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchPool"]: + """Lists all of the Pools in the specified Account. + + Lists all of the Pools in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued.
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + Pools can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchPool + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchPool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchPool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pools_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchPool], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return
pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Pool from the specified Account. + + When you request that a Pool be deleted, the following actions occur: the Pool + state is set to deleting; any ongoing resize operations on the Pool are stopped; + the Batch service starts resizing the Pool to zero Compute Nodes; any Tasks + running on existing Compute Nodes are terminated and requeued (as if a resize + Pool operation had been requested with the default requeue option); finally, + the Pool is removed from the system. Because running Tasks are requeued, the + user can rerun these Tasks by updating their Job to target a different Pool. + The Tasks can then run on the new Pool. If you want to override the requeue + behavior, then you should call resize Pool explicitly to shrink the Pool to + zero size before deleting the Pool. If you call an Update, Patch or Delete API + on a Pool in the deleting state, it will fail with HTTP status code 409 with + error code PoolBeingDeleted. + + :param pool_id: The ID of the Pool to delete. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def pool_exists( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Gets basic properties of a Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client.
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_pool_exists_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def get_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchPool: + """Gets information about the specified Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchPool. The BatchPool is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchPool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchPool] = kwargs.pop("cls", None) + + _request = build_batch_get_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchPool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def update_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + pool: _azure_batch_models4.BatchPoolUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This only replaces the Pool properties specified in the request. For example, + if the Pool has a StartTask associated with it, and a request does not specify + a StartTask element, then the Pool keeps the existing StartTask. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param pool: The pool properties to update. Required. + :type pool: ~azure.batch.models.BatchPoolUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_pool_auto_scale( # pylint: disable=inconsistent-return-statements + self, pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> None: + """Disables automatic scaling for a Pool. + + Disables automatic scaling for a Pool. + + :param pool_id: The ID of the Pool on which to disable automatic scaling. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
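+        # Patch-style update sketch (field name assumed): only properties set
+        # on BatchPoolUpdateContent are replaced; anything omitted, such as an
+        # existing StartTask, is left untouched.
+        #
+        #     from azure.batch.models import BatchPoolUpdateContent
+        #     client.update_pool("mypool", BatchPoolUpdateContent(metadata=[]))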
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_pool_auto_scale( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolEnableAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables automatic scaling for a Pool. + + You cannot enable automatic scaling on a Pool if a resize operation is in + progress on the Pool. If automatic scaling of the Pool is currently disabled, + you must specify a valid autoscale formula as part of the request. If automatic + scaling of the Pool is already enabled, you may specify a new autoscale formula + and/or a new evaluation interval. You cannot call this API for the same Pool + more than once every 30 seconds. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for enabling automatic scaling. Required. + :type content: ~azure.batch.models.BatchPoolEnableAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". 
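+        # Disabling autoscale is a body-less POST; a sketch with the same
+        # assumed "client":
+        #
+        #     client.disable_pool_auto_scale("mypool")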
Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_enable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def evaluate_pool_auto_scale( + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolEvaluateAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.AutoScaleRun: + """Gets the result of evaluating an automatic scaling formula on the Pool. + + This API is primarily for validating an autoscale formula, as it simply returns + the result without applying the formula to the Pool. The Pool must have auto + scaling enabled in order to evaluate a formula. + + :param pool_id: The ID of the Pool on which to evaluate the automatic scaling formula. + Required. + :type pool_id: str + :param content: The options to use for evaluating the automatic scaling formula. Required. + :type content: ~azure.batch.models.BatchPoolEvaluateAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: AutoScaleRun. The AutoScaleRun is compatible with MutableMapping + :rtype: ~azure.batch.models.AutoScaleRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.AutoScaleRun] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_evaluate_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + 
raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.AutoScaleRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def resize_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolResizeContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Changes the number of Compute Nodes that are assigned to a Pool. + + You can only resize a Pool when its allocation state is steady. If the Pool is + already resizing, the request fails with status code 409. When you resize a + Pool, the Pool's allocation state changes from steady to resizing. You cannot + resize Pools which are configured for automatic scaling. If you try to do this, + the Batch service returns an error 409. If you resize a Pool downwards, the + Batch service chooses which Compute Nodes to remove. To remove specific Compute + Nodes, use the Pool remove Compute Nodes API instead. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for resizing the pool. Required. + :type content: ~azure.batch.models.BatchPoolResizeContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
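+        # Dry-run sketch (names assumed): evaluate a formula without applying
+        # it; the Pool must already have autoscale enabled.
+        #
+        #     from azure.batch.models import BatchPoolEvaluateAutoScaleContent
+        #     run = client.evaluate_pool_auto_scale(
+        #         "mypool",
+        #         BatchPoolEvaluateAutoScaleContent(
+        #             auto_scale_formula="$TargetDedicatedNodes = 4;"))
+        #     print(run.results)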
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_resize_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def stop_pool_resize( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Stops an ongoing resize operation on the Pool. + + This does not restore the Pool to its previous state before the resize + operation: it only stops any further changes being made, and the Pool maintains + its current state. After stopping, the Pool stabilizes at the number of Compute + Nodes it was at when the stop operation was done. 
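+        # Resize sketch (field name assumed): valid only while the Pool's
+        # allocation state is steady, and never for autoscale-enabled Pools.
+        #
+        #     from azure.batch.models import BatchPoolResizeContent
+        #     client.resize_pool(
+        #         "mypool", BatchPoolResizeContent(target_dedicated_nodes=5))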
During the stop operation, + the Pool allocation state changes first to stopping and then to steady. A + resize operation need not be an explicit resize Pool request; this API can also + be used to halt the initial sizing of the Pool when it is created. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_stop_pool_resize_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = 
self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_pool_properties( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + pool: _azure_batch_models4.BatchPoolReplaceContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This fully replaces all the updatable properties of the Pool. For example, if + the Pool has a StartTask associated with it and if StartTask is not specified + with this request, then the Batch service will remove the existing StartTask. + + :param pool_id: The ID of the Pool to update. Required. + :type pool_id: str + :param pool: The options to use for replacing properties on the pool. Required. + :type pool: ~azure.batch.models.BatchPoolReplaceContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_pool_properties_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def remove_nodes( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchNodeRemoveContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Removes Compute Nodes from the specified Pool. + + This operation can only run when the allocation state of the Pool is steady. + When this operation runs, the allocation state changes from steady to resizing. + Each request may remove up to 100 nodes. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for removing the node. Required. + :type content: ~azure.batch.models.BatchNodeRemoveContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
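+        # Full-replace sketch (required field names assumed): unlike
+        # update_pool, any updatable property left off BatchPoolReplaceContent
+        # is reset, so the empty lists below would clear those settings.
+        #
+        #     from azure.batch.models import BatchPoolReplaceContent
+        #     client.replace_pool_properties(
+        #         "mypool",
+        #         BatchPoolReplaceContent(
+        #             application_package_references=[],
+        #             certificate_references=[],
+        #             metadata=[]))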
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_remove_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", 
response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_supported_images( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchSupportedImage"]: + """Lists all Virtual Machine Images supported by the Azure Batch service. + + Lists all Virtual Machine Images supported by the Azure Batch service. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images + `_. + Default value is None. + :paramtype filter: str + :return: An iterator like instance of BatchSupportedImage + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchSupportedImage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchSupportedImage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_supported_images_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models4.BatchSupportedImage], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_pool_node_counts( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchPoolNodeCounts"]: + """Gets the number of Compute Nodes in each state, grouped by Pool. Note that the + numbers returned may not always be up to date. If you need exact node counts, + use a list query. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images + `_. + Default value is None. 
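+        # Paging sketch (filter expression assumed): the returned ItemPaged
+        # lazily follows odata.nextLink, so plain iteration is enough.
+        #
+        #     for image in client.list_supported_images(
+        #             filter="osType eq 'linux'"):
+        #         print(image.node_agent_sku_id)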
+ :paramtype filter: str + :return: An iterator like instance of BatchPoolNodeCounts + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchPoolNodeCounts] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchPoolNodeCounts]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_node_counts_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchPoolNodeCounts], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job. + + Deleting a Job also deletes all Tasks that are part of that Job, and all Job + statistics. 
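+        # Node-count sketch (attribute names assumed): per the docstring the
+        # figures are eventually consistent, so treat them as approximate.
+        #
+        #     for counts in client.list_pool_node_counts():
+        #         print(counts.pool_id, counts.dedicated.running)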
This also overrides the retention period for Task data; that is, if
+        the Job contains Tasks which are still retained on Compute Nodes, the Batch
+        service deletes those Tasks' working directories and all their contents. When
+        a Delete Job request is received, the Batch service sets the Job to the
+        deleting state. All update operations on a Job that is in deleting state will
+        fail with status code 409 (Conflict), with additional information indicating
+        that the Job is being deleted.
+
+        :param job_id: The ID of the Job to delete. Required.
+        :type job_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword if_modified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has been modified since the specified time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has not been modified since the specified time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword force: If true, the server will delete the Job even if the corresponding nodes have
+         not fully processed the deletion. The default value is false. Default value is None.
+        :paramtype force: bool
+        :keyword etag: Check if the resource has changed. Set to None to skip etag checking. Default
+         value is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_job( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchJob: + """Gets information about the specified Job. + + Gets information about the specified Job. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
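+        # Delete-job sketch (same assumed "client"): deletion is asynchronous
+        # (202 Accepted), and updates to a Job in the deleting state fail
+        # with 409.
+        #
+        #     client.delete_job("myjob")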
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchJob. The BatchJob is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchJob] = kwargs.pop("cls", None) + + _request = build_batch_get_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchJob, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # 
type: ignore + + @distributed_trace + def update_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + job: _azure_batch_models4.BatchJobUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job. + + This replaces only the Job properties specified in the request. For example, if + the Job has constraints, and a request does not specify the constraints + element, then the Job keeps the existing constraints. + + :param job_id: The ID of the Job whose properties you want to update. Required. + :type job_id: str + :param job: The options to use for updating the Job. Required. + :type job: ~azure.batch.models.BatchJobUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
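The get_job path above maps 304 to ResourceNotModifiedError and picks the 412 exception from match_condition, which lets a caller do cheap change detection. A minimal sketch, assuming the generated operations are exposed on azure.batch.BatchClient, Entra ID auth via azure-identity, and that the model surfaces the ETag header as e_tag; the endpoint is a placeholder:

    from azure.batch import BatchClient
    from azure.core import MatchConditions
    from azure.core.exceptions import ResourceNotModifiedError
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    job = client.get_job("job-1")
    try:
        # Re-fetch only if the service-side Job changed since we last read it;
        # an unchanged Job answers 304, which the error map above raises as
        # ResourceNotModifiedError.
        job = client.get_job("job-1", etag=job.e_tag, match_condition=MatchConditions.IfModified)
    except ResourceNotModifiedError:
        pass  # our copy is still current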
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + job: _azure_batch_models4.BatchJob, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job. + + This fully replaces all the updatable properties of the Job. For example, if + the Job has constraints associated with it and if constraints is not specified + with this request, then the Batch service will remove the existing constraints. + + :param job_id: The ID of the Job whose properties you want to update. Required. 
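update_job is the PATCH-style half of the pair defined here: only the fields present in BatchJobUpdateContent are replaced, so a sparse body leaves existing constraints and metadata alone. A minimal sketch under the same client assumptions as the earlier example; priority is one of the updatable Job fields:

    from azure.batch import BatchClient
    from azure.batch.models import BatchJobUpdateContent
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    # Only priority is serialized into the request body, so every other
    # Job property keeps its current value on the service.
    client.update_job("job-1", BatchJobUpdateContent(priority=100))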
+ :type job_id: str + :param job: A job with updated properties. Required. + :type job: ~azure.batch.models.BatchJob + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + 
response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + content: _azure_batch_models4.BatchJobDisableContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables the specified Job, preventing new Tasks from running. + + The Batch Service immediately moves the Job to the disabling state. Batch then + uses the disableTasks parameter to determine what to do with the currently + running Tasks of the Job. The Job remains in the disabling state until the + disable operation is completed and all Tasks have been dealt with according to + the disableTasks option; the Job then moves to the disabled state. No new Tasks + are started under the Job until it moves back to active state. If you try to + disable a Job that is in any state other than active, disabling, or disabled, + the request fails with status code 409. + + :param job_id: The ID of the Job to disable. Required. + :type job_id: str + :param content: The options to use for disabling the Job. Required. + :type content: ~azure.batch.models.BatchJobDisableContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
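replace_job, by contrast, is a full PUT: any updatable property left out of the BatchJob body is reset, as the constraints example in its docstring shows. Pairing it with etag and MatchConditions.IfNotModified makes a 412 surface as ResourceModifiedError per the error map above, which gives a simple optimistic-concurrency pattern. A sketch, same assumptions as before:

    from azure.batch import BatchClient
    from azure.core import MatchConditions
    from azure.core.exceptions import ResourceModifiedError
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    job = client.get_job("job-1")
    job.priority = 500
    try:
        # Write back only if no one else modified the Job since our read.
        client.replace_job("job-1", job, etag=job.e_tag, match_condition=MatchConditions.IfNotModified)
    except ResourceModifiedError:
        print("Job changed on the service; re-read and retry.")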
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_batch_disable_job_request(
+            job_id=job_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            etag=etag,
+            match_condition=match_condition,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace
+    def enable_job(  # pylint: disable=inconsistent-return-statements
+        self,
+        job_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """Enables the specified Job, allowing new Tasks to run.
+
+        When you call this API, the Batch service sets a disabled Job to the enabling
+        state. After this operation is completed, the Job moves to the active
+        state, and scheduling of new Tasks under the Job resumes. The Batch service
+        does not allow a Task to remain in the active state for more than 180 days.
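For disable_job above, what happens to Tasks that are already running is decided entirely by the request body. A sketch, assuming BatchJobDisableContent exposes the REST disableTasks option as disable_tasks with values such as requeue, terminate, or wait:

    from azure.batch import BatchClient
    from azure.batch.models import BatchJobDisableContent
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    # Requeue running Tasks; the Job stays in the "disabling" state until
    # every running Task has been handled, then moves to "disabled".
    client.disable_job("job-1", BatchJobDisableContent(disable_tasks="requeue"))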
+ Therefore, if you enable a Job containing active Tasks which were added more + than 180 days ago, those Tasks will not run. + + :param job_id: The ID of the Job to enable. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def terminate_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + parameters: Optional[_azure_batch_models4.BatchJobTerminateContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Job, marking it as completed. + + When a Terminate Job request is received, the Batch service sets the Job to the + terminating state. The Batch service then terminates any running Tasks + associated with the Job and runs any required Job release Tasks. Then the Job + moves into the completed state. If there are any Tasks in the Job in the active + state, they will remain in the active state. Once a Job is terminated, new + Tasks cannot be added and any remaining active Tasks will not be scheduled. + + :param job_id: The ID of the Job to terminate. Required. + :type job_id: str + :param parameters: The options to use for terminating the Job. Default value is None. + :type parameters: ~azure.batch.models.BatchJobTerminateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the Job even if the corresponding nodes have + not fully processed the termination. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_terminate_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def create_job( # pylint: disable=inconsistent-return-statements + self, + job: _azure_batch_models4.BatchJobCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job to the specified Account. + + The Batch service supports two ways to control the work done as part of a Job. + In the first approach, the user specifies a Job Manager Task. The Batch service + launches this Task when it is ready to start the Job. The Job Manager Task + controls all other Tasks that run under this Job, by using the Task APIs. In + the second approach, the user directly controls the execution of Tasks under an + active Job, by using the Task APIs. 
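A minimal create_job call needs an id and a Pool for the Tasks to run on; the Job Manager Task described above is optional. A sketch, assuming the pool reference model is BatchPoolInfo with a pool_id field; the naming caution in the sentence that follows applies to the id chosen here:

    from azure.batch import BatchClient
    from azure.batch.models import BatchJobCreateContent, BatchPoolInfo  # BatchPoolInfo name assumed
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    client.create_job(
        BatchJobCreateContent(
            id="nightly-build",  # avoid secrets here; ids can appear in telemetry
            pool_info=BatchPoolInfo(pool_id="pool-1"),
        )
    )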
Also note: when naming Jobs, avoid + including sensitive information such as user names or secret project names. + This information may appear in telemetry logs accessible to Microsoft Support + engineers. + + :param job: The Job to be created. Required. + :type job: ~azure.batch.models.BatchJobCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_jobs( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchJob"]: + """Lists all of the Jobs in the specified Account. + + Lists all of the Jobs in the specified Account. 
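list_jobs returns an azure.core ItemPaged, so plain iteration follows odata.nextLink automatically, and filter/select/expand pass straight through as OData query options. A minimal sketch, same client assumptions as the earlier examples:

    from azure.batch import BatchClient
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    # The filter is evaluated server-side; select trims each returned Job.
    for job in client.list_jobs(filter="state eq 'active'", select=["id", "state"]):
        print(job.id, job.state)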
+ + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs + `_. Default + value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = 
pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def list_jobs_from_schedule(
+        self,
+        job_schedule_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        select: Optional[List[str]] = None,
+        expand: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> ItemPaged["_azure_batch_models4.BatchJob"]:
+        """Lists the Jobs that have been created under the specified Job Schedule.
+
+        Lists the Jobs that have been created under the specified Job Schedule.
+
+        :param job_schedule_id: The ID of the Job Schedule from which you want to get a list of Jobs.
+         Required.
+        :type job_schedule_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         Jobs can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule
+         `_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword select: An OData $select clause. Default value is None.
+        :paramtype select: list[str]
+        :keyword expand: An OData $expand clause. Default value is None.
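The prepare_request/extract_data pair in these list operations is the standard azure-core paging hookup: the first request is built from the method arguments, and follow-up requests are rebuilt from odata.nextLink with the client's api-version re-applied. Callers who want page granularity can use by_page(), which ItemPaged provides:

    from azure.batch import BatchClient
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    pager = client.list_jobs_from_schedule("schedule-1", max_results=100)
    for page_number, page in enumerate(pager.by_page()):
        print(f"page {page_number}: {sum(1 for _ in page)} Jobs")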
+ :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_from_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_job_preparation_and_release_task_status( # pylint: disable=name-too-long + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchJobPreparationAndReleaseTaskStatus"]: + """Lists the execution status of the Job Preparation and Job Release Task for the + specified Job across the Compute Nodes where the Job has run. 
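Because this operation answers 409 when the Job defines no Job Preparation or Job Release Task, and the error map below turns 409 into ResourceExistsError, a caller can handle that case explicitly. A sketch; the node_id attribute name on the status model is an assumption:

    from azure.batch import BatchClient
    from azure.core.exceptions import ResourceExistsError
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    try:
        for status in client.list_job_preparation_and_release_task_status("job-1"):
            print(status.node_id)  # assumed attribute name
    except ResourceExistsError:
        # 409 / JobPreparationTaskNotSpecified: the Job has no prep/release task.
        print("Job job-1 defines no Job Preparation or Job Release Task.")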
+ + This API returns the Job Preparation and Job Release Task status on all Compute + Nodes that have run the Job Preparation or Job Release Task. This includes + Compute Nodes which have since been removed from the Pool. If this API is + invoked on a Job which has no Job Preparation or Job Release Task, the Batch + service returns HTTP status code 409 (Conflict) with an error code of + JobPreparationTaskNotSpecified. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchJobPreparationAndReleaseTaskStatus + :rtype: + ~azure.core.paging.ItemPaged[~azure.batch.models.BatchJobPreparationAndReleaseTaskStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchJobPreparationAndReleaseTaskStatus]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_preparation_and_release_task_status_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json()
+            list_of_elem = _deserialize(
+                List[_azure_batch_models4.BatchJobPreparationAndReleaseTaskStatus], deserialized.get("value", [])
+            )
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def get_job_task_counts(
+        self, job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any
+    ) -> _azure_batch_models4.BatchTaskCountsResult:
+        """Gets the Task counts for the specified Job.
+
+        Task counts provide a count of the Tasks by active, running or completed Task
+        state, and a count of Tasks which succeeded or failed. Tasks in the preparing
+        state are counted as running. Note that the numbers returned may not always be
+        up to date. If you need exact task counts, use a list query.
+
+        :param job_id: The ID of the Job. Required.
+        :type job_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: BatchTaskCountsResult.
The BatchTaskCountsResult is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchTaskCountsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchTaskCountsResult] = kwargs.pop("cls", None) + + _request = build_batch_get_job_task_counts_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchTaskCountsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def create_certificate( # pylint: disable=inconsistent-return-statements + self, + certificate: _azure_batch_models4.BatchCertificate, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Certificate to the specified Account. + + Creates a Certificate to the specified Account. + + :param certificate: The Certificate to be created. Required. + :type certificate: ~azure.batch.models.BatchCertificate + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
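Since get_job_task_counts is documented as possibly lagging behind the true state, it fits polling and dashboards, with a list query as the exact fallback. A sketch, assuming the result exposes a task_counts group with active/running/completed/succeeded/failed fields:

    from azure.batch import BatchClient
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    counts = client.get_job_task_counts("job-1").task_counts  # attribute name assumed
    print(f"active={counts.active} running={counts.running} completed={counts.completed}")
    if counts.failed:
        print(f"{counts.failed} Task(s) failed; consider listing Tasks for details")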
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(certificate, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_certificate_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_certificates( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchCertificate"]: + """Lists all of the Certificates that have been added to the specified Account. + + Lists all of the Certificates that have been added to the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. 
For more information on constructing this filter, see + `https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-certificates + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchCertificate + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchCertificate] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchCertificate]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_certificates_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchCertificate], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def cancel_certificate_deletion( # pylint: disable=inconsistent-return-statements + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Cancels a failed deletion of a Certificate from the specified Account. 
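delete_certificate and cancel_certificate_deletion form a small state machine around deleteFailed: a delete that fails because the Certificate is still referenced leaves it in deleteFailed, and cancelling the deletion flips it back to active. A sketch; both calls identify the Certificate by thumbprint, and the algorithm must be sha1:

    from azure.batch import BatchClient
    from azure.identity import DefaultAzureCredential

    client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())

    THUMBPRINT = "0123456789abcdef0123456789abcdef01234567"  # placeholder

    client.delete_certificate("sha1", THUMBPRINT)
    # ...later, if the Certificate ends up in deleteFailed but is still needed:
    client.cancel_certificate_deletion("sha1", THUMBPRINT)  # status returns to active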
+ + If you try to delete a Certificate that is being used by a Pool or Compute + Node, the status of the Certificate changes to deleteFailed. If you decide that + you want to continue using the Certificate, you can use this operation to set + the status of the Certificate back to active. If you intend to delete the + Certificate, you do not need to run this operation after the deletion failed. + You must make sure that the Certificate is not being used by any resources, and + then you can try again to delete the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate being deleted. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_cancel_certificate_deletion_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete_certificate( # pylint: disable=inconsistent-return-statements + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deletes a Certificate from the specified Account. + + You cannot delete a Certificate if a resource (Pool or Compute Node) is using + it. Before you can delete a Certificate, you must therefore make sure that the + Certificate is not associated with any existing Pools, the Certificate is not + installed on any Nodes (even if you remove a Certificate from a Pool, it is not + removed from existing Compute Nodes in that Pool until they restart), and no + running Tasks depend on the Certificate. If you try to delete a Certificate + that is in use, the deletion fails. The Certificate status changes to + deleteFailed. You can use Cancel Delete Certificate to set the status back to + active if you decide that you want to continue using the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to be deleted. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_certificate( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchCertificate: + """Gets information about the specified Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to get. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchCertificate. The BatchCertificate is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchCertificate + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchCertificate] = kwargs.pop("cls", None) + + _request = build_batch_get_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchCertificate, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def job_schedule_exists( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Checks whether the specified Job Schedule exists. + + Checks whether the specified Job Schedule exists. + + :param job_schedule_id: The ID of the Job Schedule which you want to check. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_job_schedule_exists_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def delete_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job Schedule from the specified Account. + + When you delete a Job Schedule, this also deletes all Jobs and Tasks under that + schedule. When Tasks are deleted, all the files in their working directories on + the Compute Nodes are also deleted (the retention period is ignored). The Job + Schedule statistics are no longer accessible once the Job Schedule is deleted, + though they are still counted towards Account lifetime statistics. + + :param job_schedule_id: The ID of the Job Schedule to delete. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. 
The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will delete the JobSchedule even if the corresponding nodes + have not fully processed the deletion. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchJobSchedule: + """Gets information about the specified Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to get. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchJobSchedule. 
The BatchJobSchedule is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchJobSchedule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchJobSchedule] = kwargs.pop("cls", None) + + _request = build_batch_get_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchJobSchedule, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def update_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + job_schedule: _azure_batch_models4.BatchJobScheduleUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This replaces only the Job Schedule properties specified in the request. For + example, if the schedule property is not specified with this request, then the + Batch service will keep the existing schedule. 
Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: The options to use for updating the Job Schedule. Required. + :type job_schedule: ~azure.batch.models.BatchJobScheduleUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + job_schedule: _azure_batch_models4.BatchJobSchedule, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This fully replaces all the updatable properties of the Job Schedule. For + example, if the schedule property is not specified with this request, then the + Batch service will remove the existing schedule. Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: A Job Schedule with updated properties. Required. + :type job_schedule: ~azure.batch.models.BatchJobSchedule + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables a Job Schedule. + + No new Jobs will be created until the Job Schedule is enabled again. + + :param job_schedule_id: The ID of the Job Schedule to disable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables a Job Schedule. + + Enables a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to enable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def terminate_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates a Job Schedule. + + Terminates a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to terminate. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the JobSchedule even if the corresponding + nodes have not fully processed the termination. The default value is false. Default value is + None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + 
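+    # NOTE (editor's illustrative sketch, not generated code): the etag and
+    # match_condition keywords on the Job Schedule operations above enable
+    # optimistic concurrency. A minimal, hypothetical caller-side example
+    # follows; the client construction, account endpoint, schedule ID, and the
+    # `e_tag` model attribute are assumptions, not part of this module:
+    #
+    #     from azure.core import MatchConditions
+    #     from azure.identity import DefaultAzureCredential
+    #     from azure.batch import BatchClient
+    #
+    #     client = BatchClient(
+    #         "https://<account>.<region>.batch.azure.com", DefaultAzureCredential()
+    #     )
+    #     if client.job_schedule_exists("nightly-reports"):
+    #         schedule = client.get_job_schedule("nightly-reports")
+    #         # Terminate only if the schedule is unchanged since it was read;
+    #         # an intervening update makes the service answer 412 instead.
+    #         client.terminate_job_schedule(
+    #             "nightly-reports",
+    #             etag=schedule.e_tag,
+    #             match_condition=MatchConditions.IfNotModified,
+    #         )
+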
@distributed_trace + def create_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule: _azure_batch_models4.BatchJobScheduleCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job Schedule to the specified Account. + + Creates a Job Schedule to the specified Account. + + :param job_schedule: The Job Schedule to be created. Required. + :type job_schedule: ~azure.batch.models.BatchJobScheduleCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_schedule_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_job_schedules( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any 
+ ) -> ItemPaged["_azure_batch_models4.BatchJobSchedule"]: + """Lists all of the Job Schedules in the specified Account. + + Lists all of the Job Schedules in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJobSchedule + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchJobSchedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchJobSchedule]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_schedules_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchJobSchedule], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = 
prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task: _azure_batch_models4.BatchTaskCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Task to the specified Job. + + The maximum lifetime of a Task from addition to completion is 180 days. If a + Task has not completed within 180 days of being added it will be terminated by + the Batch service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task is to be created. Required. + :type job_id: str + :param task: The Task to be created. Required. + :type task: ~azure.batch.models.BatchTaskCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_tasks( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchTask"]: + """Lists all of the Tasks that are associated with the specified Job. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. 
A maximum of 1000
+         Tasks can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks
+         <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks>`_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword select: An OData $select clause. Default value is None.
+        :paramtype select: list[str]
+        :keyword expand: An OData $expand clause. Default value is None.
+        :paramtype expand: list[str]
+        :return: An iterator like instance of BatchTask
+        :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchTask]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models4.BatchTask]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_tasks_request(
+                    job_id=job_id,
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    filter=filter,
+                    select=select,
+                    expand=expand,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models4.BatchTask], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def create_task_collection(
+        self,
+        job_id: str,
+        task_collection: _azure_batch_models4.BatchTaskGroup,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] =
None, + **kwargs: Any + ) -> _azure_batch_models4.BatchTaskAddCollectionResult: + """Adds a collection of Tasks to the specified Job. + + Note that each Task must have a unique ID. The Batch service may not return the + results for each Task in the same order the Tasks were submitted in this + request. If the server times out or the connection is closed during the + request, the request may have been partially or fully processed, or not at all. + In such cases, the user should re-issue the request. Note that it is up to the + user to correctly handle failures when re-issuing a request. For example, you + should use the same Task IDs during a retry so that if the prior operation + succeeded, the retry will not create extra Tasks unexpectedly. If the response + contains any Tasks which failed to add, a client can retry the request. In a + retry, it is most efficient to resubmit only Tasks that failed to add, and to + omit Tasks that were successfully added on the first attempt. The maximum + lifetime of a Task from addition to completion is 180 days. If a Task has not + completed within 180 days of being added it will be terminated by the Batch + service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task collection is to be added. Required. + :type job_id: str + :param task_collection: The Tasks to be added. Required. + :type task_collection: ~azure.batch.models.BatchTaskGroup + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskAddCollectionResult. 
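
The list_tasks body above follows the generated paging pattern: ItemPaged drives get_next (which re-issues the request for each odata.nextLink) and extract_data (which splits a page into its continuation token and an iterator of deserialized items). A stripped-down sketch of that contract, with fake in-memory pages standing in for HTTP responses:

from typing import Iterator, Optional, Tuple
from azure.core.paging import ItemPaged

PAGES = [{"value": [1, 2], "odata.nextLink": "page-2"}, {"value": [3], "odata.nextLink": None}]

def get_next(next_link: Optional[str]) -> dict:
    # None requests the first page; otherwise follow the continuation token.
    return PAGES[0] if next_link is None else PAGES[1]

def extract_data(page: dict) -> Tuple[Optional[str], Iterator[int]]:
    return page.get("odata.nextLink") or None, iter(page.get("value", []))

print(list(ItemPaged(get_next, extract_data)))  # [1, 2, 3]
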
The BatchTaskAddCollectionResult is compatible with + MutableMapping + :rtype: ~azure.batch.models.BatchTaskAddCollectionResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.BatchTaskAddCollectionResult] = kwargs.pop("cls", None) + + _content = json.dumps(task_collection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_collection_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchTaskAddCollectionResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Task from the specified Job. + + When a Task is deleted, all of the files in its directory on the Compute Node + where it ran are also deleted (regardless of the retention time). For + multi-instance Tasks, the delete Task operation applies synchronously to the + primary task; subtasks and their files are then deleted asynchronously in the + background. + + :param job_id: The ID of the Job from which to delete the Task. Required. 
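
Per the retry guidance in the docstring above, a sketch of resubmitting only the failed additions while reusing the same Task IDs. The attribute names (`value`, `status`, `task_id`) and the "servererror" status string are assumptions about the current generated models:

from azure.batch.models import BatchTaskGroup

def add_all_tasks(client, job_id, tasks):
    # Keyed by ID so a retry resubmits the identical Task definitions.
    pending = {t.id: t for t in tasks}
    while pending:
        result = client.create_task_collection(job_id, BatchTaskGroup(value=list(pending.values())))
        for added in result.value or []:
            if added.status != "servererror":  # success/clienterror gain nothing from a retry
                pending.pop(added.task_id, None)
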
+ :type job_id: str + :param task_id: The ID of the Task to delete. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # 
type: ignore + + @distributed_trace + def get_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchTask: + """Gets information about the specified Task. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to get information about. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchTask. 
The BatchTask is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchTask + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchTask] = kwargs.pop("cls", None) + + _request = build_batch_get_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchTask, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def replace_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + task: _azure_batch_models4.BatchTask, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Task. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to update. Required. 
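
A small sketch of the `get_task` call generated above, using the documented $select projection; the field names passed to `select` mirror REST wire names and are illustrative:

from azure.identity import DefaultAzureCredential
from azure.batch import BatchClient

client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())
task = client.get_task("my-job", "task-1", select=["id", "state", "executionInfo"])
print(task.id, task.state)  # fields outside the projection come back unset
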
+ :type task_id: str + :param task: The Task to update. Required. + :type task: ~azure.batch.models.BatchTask + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + 
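
The 412 remapping just above is what turns the etag keywords into usable optimistic concurrency: with `MatchConditions.IfNotModified`, a stale etag surfaces as `ResourceModifiedError` instead of a generic `HttpResponseError`. A sketch of a guarded replace (the `e_tag` attribute name is an assumption about the generated model):

from azure.core import MatchConditions
from azure.core.exceptions import ResourceModifiedError

def guarded_replace(client, job_id, task_id):
    task = client.get_task(job_id, task_id)
    # ... mutate the updatable properties of `task` here ...
    try:
        client.replace_task(job_id, task_id, task,
                            etag=task.e_tag, match_condition=MatchConditions.IfNotModified)
    except ResourceModifiedError:
        pass  # lost the race: re-read the Task and retry if still relevant
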
+ response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_sub_tasks( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchSubtask"]: + """Lists all of the subtasks that are associated with the specified multi-instance + Task. + + If the Task is not a multi-instance Task then this returns an empty collection. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :param task_id: The ID of the Task. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchSubtask + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchSubtask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchSubtask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_sub_tasks_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchSubtask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def terminate_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Task. + + When the Task has been terminated, it moves to the completed state. For + multi-instance Tasks, the terminate Task operation applies synchronously to the + primary task; subtasks are then terminated asynchronously in the background. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to terminate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def reactivate_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Reactivates a Task, allowing it to run again even if its retry count has been + exhausted. + + Reactivation makes a Task eligible to be retried again up to its maximum retry + count. The Task's state is changed to active. As the Task is no longer in the + completed state, any previous exit code or failure information is no longer + available after reactivation. Each time a Task is reactivated, its retry count + is reset to 0. Reactivation will fail for Tasks that are not completed or that + previously completed successfully (with an exit code of 0). Additionally, it + will fail if the Job has completed (or is terminating or deleting). 
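
Tying the two multi-instance notes together, a sketch that terminates the primary Task (the service answers 204) and then inspects the subtasks, which wind down asynchronously; the IDs are placeholders:

from azure.identity import DefaultAzureCredential
from azure.batch import BatchClient

client = BatchClient("https://<account>.<region>.batch.azure.com", DefaultAzureCredential())
client.terminate_task("my-job", "mpi-task")  # primary task is terminated synchronously
for subtask in client.list_sub_tasks("my-job", "mpi-task"):
    print(subtask.id, subtask.state)  # subtasks may still be terminating in the background
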
+ + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to reactivate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_reactivate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete_task_file( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified Task file from the Compute Node where the Task ran. + + Deletes the specified Task file from the Compute Node where the Task ran. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. Default value is None. 
+ :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_file_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_task_file( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + """Returns the content of the specified Task file. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
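
A one-line sketch of the recursive behaviour described above: deleting a non-empty directory under the Task's working directory only succeeds with recursive=True. The path and IDs are hypothetical, and `client` is as in the earlier sketches:

client.delete_task_file("my-job", "task-1", "wd/output", recursive=True)
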
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: Iterator[bytes] + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_task_file_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_task_file_properties( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> bool: + """Gets the 
properties of the specified Task file. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_get_task_file_properties_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = 
self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def list_task_files( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchNodeFile"]: + """Lists the files in a Task's directory on its Compute Node. + + Lists the files in a Task's directory on its Compute Node. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose files you want to list. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files + `_. + Default value is None. + :paramtype filter: str + :keyword recursive: Whether to list children of the Task directory. This parameter can be used + in + combination with the filter parameter to list specific type of files. Default value is None. 
+ :paramtype recursive: bool + :return: An iterator like instance of BatchNodeFile + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchNodeFile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNodeFile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_task_files_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeFile], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_node_user( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + user: _azure_batch_models4.BatchNodeUserCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Adds a user Account to the specified Compute Node. + + You can add a user Account to a Compute Node only when it is in the idle or + running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to create a user Account. Required. 
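
A sketch of the recursive listing plus an OData filter, per the docstring above; the startswith expression follows the linked odata-filters-in-batch page and is illustrative, and `client` is as in the earlier sketches:

for node_file in client.list_task_files("my-job", "task-1", recursive=True,
                                        filter="startswith(name, 'wd')"):
    print(node_file.name, node_file.is_directory)
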
+ :type node_id: str + :param user: The options to use for creating the user. Required. + :type user: ~azure.batch.models.BatchNodeUserCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(user, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_node_user_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete_node_user( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deletes a user Account from the specified Compute Node. + + You can delete a user Account to a Compute Node only when it is in the idle or + running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to delete a user Account. Required. 
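
A sketch of adding a debugging Account to an idle or running node. The `BatchNodeUserCreateContent` field names (`name`, `password`, `is_admin`) are assumptions based on the current models, and the credential value is a placeholder:

from azure.batch.models import BatchNodeUserCreateContent

# `client` as in the earlier sketches; use a real secret source in practice.
client.create_node_user(
    "pool-1", "node-1",
    BatchNodeUserCreateContent(name="debug-user", password="<secret>", is_admin=True),
)
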
+ :type node_id: str + :param user_name: The name of the user Account to delete. Required. + :type user_name: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_user_request( + pool_id=pool_id, + node_id=node_id, + user_name=user_name, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_node_user( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + user_name: str, + content: _azure_batch_models4.BatchNodeUserUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the password and expiration time of a user Account on the specified Compute Node. + + This operation replaces of all the updatable properties of the Account. For + example, if the expiryTime element is not specified, the current value is + replaced with the default value, not left unmodified. You can update a user + Account on a Compute Node only when it is in the idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to update a user Account. Required. + :type node_id: str + :param user_name: The name of the user Account to update. Required. + :type user_name: str + :param content: The options to use for updating the user. Required. 
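+
+         A minimal illustrative sketch, assuming an authenticated client bound to the
+         name ``client``; the pool, node, and user IDs and the password value are
+         hypothetical, not taken from the service documentation:
+
+         .. code-block:: python
+
+            from azure.batch.models import BatchNodeUserUpdateContent
+
+            # Replaces every updatable property of the Account; properties left
+            # unset revert to their defaults rather than being preserved.
+            update = BatchNodeUserUpdateContent(password="example-password")
+            client.replace_node_user("pool-1", "tvm-123", "admin", update)
+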
+ :type content: ~azure.batch.models.BatchNodeUserUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_node_user_request( + pool_id=pool_id, + node_id=node_id, + user_name=user_name, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_node( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchNode: + """Gets information about the specified Compute Node. + + Gets information about the specified Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to get information about. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchNode. The BatchNode is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchNode] = kwargs.pop("cls", None) + + _request = build_batch_get_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchNode, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def reboot_node( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeRebootContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Restarts the specified Compute Node. + + You can restart a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for rebooting the Compute Node. Default value is None. 
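+
+         A minimal illustrative sketch, assuming an authenticated client bound to the
+         name ``client`` and that the generated model exposes ``node_reboot_option``;
+         the IDs are hypothetical:
+
+         .. code-block:: python
+
+            from azure.batch.models import BatchNodeRebootContent
+
+            # Requeue any running Tasks, then reboot the node.
+            options = BatchNodeRebootContent(node_reboot_option="requeue")
+            client.reboot_node("pool-1", "tvm-123", options)
+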
+        :type parameters: ~azure.batch.models.BatchNodeRebootContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        if parameters is not None:
+            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+        else:
+            _content = None
+
+        _request = build_batch_reboot_node_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace
+    def start_node(  # pylint: disable=inconsistent-return-statements
+        self,
+        pool_id: str,
+        node_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> None:
+        """Starts the specified Compute Node.
+
+        You can start a Compute Node only if it has been deallocated.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node that you want to start. Required.
+        :type node_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_start_node_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace
+    def reimage_node(  # pylint: disable=inconsistent-return-statements
+        self,
+        pool_id: str,
+        node_id: str,
+        parameters: Optional[_azure_batch_models4.BatchNodeReimageContent] = None,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> None:
+        """Reinstalls the operating system on the specified Compute Node.
+
+        You can reinstall the operating system on a Compute Node only if it is in an
+        idle or running state. This API can be invoked only on Pools created with the
+        cloud service configuration property.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node that you want to reimage. Required.
+        :type node_id: str
+        :param parameters: The options to use for reimaging the Compute Node. Default value is None.
+        :type parameters: ~azure.batch.models.BatchNodeReimageContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        if parameters is not None:
+            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+        else:
+            _content = None
+
+        _request = build_batch_reimage_node_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace
+    def deallocate_node(  # pylint: disable=inconsistent-return-statements
+        self,
+        pool_id: str,
+        node_id: str,
+        parameters: Optional[_azure_batch_models4.BatchNodeDeallocateContent] = None,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> None:
+        """Deallocates the specified Compute Node.
+
+        You can deallocate a Compute Node only if it is in an idle or running state.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node that you want to deallocate. Required.
+        :type node_id: str
+        :param parameters: The options to use for deallocating the Compute Node. Default value is
+         None.
+        :type parameters: ~azure.batch.models.BatchNodeDeallocateContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds.
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_deallocate_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_node_scheduling( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeDisableSchedulingContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Disables Task scheduling on the specified Compute Node. + + You can disable Task scheduling on a Compute Node only if its current + scheduling state is enabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to disable Task scheduling. + Required. + :type node_id: str + :param parameters: The options to use for disabling scheduling on the Compute Node. Default + value is None. 
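+
+         A minimal illustrative sketch, assuming an authenticated client bound to the
+         name ``client`` and that the generated model exposes
+         ``node_disable_scheduling_option``; the IDs are hypothetical:
+
+         .. code-block:: python
+
+            from azure.batch.models import BatchNodeDisableSchedulingContent
+
+            # Requeue running Tasks and stop scheduling new Tasks on the node.
+            options = BatchNodeDisableSchedulingContent(node_disable_scheduling_option="requeue")
+            client.disable_node_scheduling("pool-1", "tvm-123", options)
+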
+ :type parameters: ~azure.batch.models.BatchNodeDisableSchedulingContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_disable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_node_scheduling( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Enables Task scheduling on the specified Compute Node. + + You can enable Task scheduling on a Compute Node only if its current scheduling + state is disabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to enable Task scheduling. + Required. 
+ :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_node_remote_login_settings( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchNodeRemoteLoginSettings: + """Gets the settings required for remote login to a Compute Node. + + Before you can remotely login to a Compute Node using the remote login settings, + you must create a user Account on the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node for which to obtain the remote login settings. + Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: BatchNodeRemoteLoginSettings. The BatchNodeRemoteLoginSettings is compatible with
+         MutableMapping
+        :rtype: ~azure.batch.models.BatchNodeRemoteLoginSettings
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_azure_batch_models4.BatchNodeRemoteLoginSettings] = kwargs.pop("cls", None)
+
+        _request = build_batch_get_node_remote_login_settings_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_azure_batch_models4.BatchNodeRemoteLoginSettings, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace
+    def upload_node_logs(
+        self,
+        pool_id: str,
+        node_id: str,
+        content: _azure_batch_models4.UploadBatchServiceLogsContent,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> _azure_batch_models4.UploadBatchServiceLogsResult:
+        """Upload Azure Batch service log files from the specified Compute Node to Azure
+        Blob Storage.
+
+        This is for gathering Azure Batch service log files in an automated fashion
+        from Compute Nodes if you are experiencing an error and wish to escalate to
+        Azure support. The Azure Batch service log files should be shared with Azure
+        support to aid in debugging issues with the Batch service.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node from which you want to upload the Azure Batch
+         service log files. Required.
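+
+         A minimal illustrative sketch, assuming an authenticated client bound to the
+         name ``client`` and that the generated model exposes ``container_url`` and
+         ``start_time``; the IDs and SAS URL are hypothetical placeholders:
+
+         .. code-block:: python
+
+            import datetime
+            from azure.batch.models import UploadBatchServiceLogsContent
+
+            # Upload service logs written since July 1 to a writable blob container.
+            content = UploadBatchServiceLogsContent(
+                container_url="https://account.blob.core.windows.net/logs?<sas-token>",
+                start_time=datetime.datetime(2025, 7, 1, tzinfo=datetime.timezone.utc),
+            )
+            result = client.upload_node_logs("pool-1", "tvm-123", content)
+            print(result.number_of_files_uploaded)
+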
+ :type node_id: str + :param content: The Azure Batch service log files upload options. Required. + :type content: ~azure.batch.models.UploadBatchServiceLogsContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: UploadBatchServiceLogsResult. The UploadBatchServiceLogsResult is compatible with + MutableMapping + :rtype: ~azure.batch.models.UploadBatchServiceLogsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.UploadBatchServiceLogsResult] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_upload_node_logs_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.UploadBatchServiceLogsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_nodes( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = 
None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchNode"]: + """Lists the Compute Nodes in the specified Pool. + + Lists the Compute Nodes in the specified Pool. + + :param pool_id: The ID of the Pool from which you want to list Compute Nodes. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNode + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchNode] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNode]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNode], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + 
def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_node_extension( + self, + pool_id: str, + node_id: str, + extension_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchNodeVMExtension: + """Gets information about the specified Compute Node Extension. + + Gets information about the specified Compute Node Extension. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that contains the extensions. Required. + :type node_id: str + :param extension_name: The name of the Compute Node Extension that you want to get information + about. Required. + :type extension_name: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchNodeVMExtension. 
The BatchNodeVMExtension is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchNodeVMExtension + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchNodeVMExtension] = kwargs.pop("cls", None) + + _request = build_batch_get_node_extension_request( + pool_id=pool_id, + node_id=node_id, + extension_name=extension_name, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchNodeVMExtension, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_node_extensions( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchNodeVMExtension"]: + """Lists the Compute Nodes Extensions in the specified Pool. + + Lists the Compute Nodes Extensions in the specified Pool. + + :param pool_id: The ID of the Pool that contains Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to list extensions. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNodeVMExtension + :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchNodeVMExtension] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNodeVMExtension]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_extensions_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeVMExtension], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_node_file( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified file from the Compute Node. 
+ + Deletes the specified file from the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. Default value is None. + :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_node_file( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + """Returns the content of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. 
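+
+         A minimal illustrative sketch, assuming an authenticated client bound to the
+         name ``client``; the IDs and file path are hypothetical, and ``ocp_range``
+         uses the documented ``bytes=startRange-endRange`` format to stream only the
+         first KiB of the file:
+
+         .. code-block:: python
+
+            chunks = client.get_node_file(
+                "pool-1",
+                "tvm-123",
+                "workitems/job-1/task-1/stdout.txt",
+                ocp_range="bytes=0-1023",
+            )
+            data = b"".join(chunks)  # the operation returns an Iterator[bytes]
+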
+ :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: Iterator[bytes] + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_node_file_properties( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> bool: + """Gets the properties of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_properties_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def list_node_files( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> ItemPaged["_azure_batch_models4.BatchNodeFile"]: + """Lists all of the files in Task directories on the specified Compute Node. + + Lists all of the files in Task directories on the specified Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node whose files you want to list. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         files can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files
+         <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files>`_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword recursive: Whether to list children of a directory. Default value is None.
+        :paramtype recursive: bool
+        :return: An iterator-like instance of BatchNodeFile
+        :rtype: ~azure.core.paging.ItemPaged[~azure.batch.models.BatchNodeFile]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models4.BatchNodeFile]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_node_files_request(
+                    pool_id=pool_id,
+                    node_id=node_id,
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    filter=filter,
+                    recursive=recursive,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeFile], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/batch/azure-batch/client/_operations/_patch.py b/sdk/batch/azure-batch/client/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/_patch.py b/sdk/batch/azure-batch/client/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/_utils/__init__.py b/sdk/batch/azure-batch/client/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/batch/azure-batch/client/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/batch/azure-batch/client/_utils/model_base.py b/sdk/batch/azure-batch/client/_utils/model_base.py new file mode 100644 index 000000000000..49d5c7259389 --- /dev/null +++ b/sdk/batch/azure-batch/client/_utils/model_base.py @@ -0,0 +1,1232 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access, broad-except
+
+import copy
+import calendar
+import decimal
+import functools
+import sys
+import logging
+import base64
+import re
+import typing
+import enum
+import email.utils
+from datetime import datetime, date, time, timedelta, timezone
+from json import JSONEncoder
+import xml.etree.ElementTree as ET
+from collections.abc import MutableMapping
+from typing_extensions import Self
+import isodate
+from azure.core.exceptions import DeserializationError
+from azure.core import CaseInsensitiveEnumMeta
+from azure.core.pipeline import PipelineResponse
+from azure.core.serialization import _Null
+
+_LOGGER = logging.getLogger(__name__)
+
+__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
+
+TZ_UTC = timezone.utc
+_T = typing.TypeVar("_T")
+
+
+def _timedelta_as_isostr(td: timedelta) -> str:
+    """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+                # Remove trailing zeros
+                seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
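+# Illustrative doctest-style sketch (not part of the generated module): how the
+# two serializers above normalize values, using only standard-library inputs.
+# The expected timedelta output is the one given in _timedelta_as_isostr's own
+# docstring; the datetime output follows the RFC 3339 "Z" normalization above.
+#
+#     >>> from datetime import datetime, timedelta, timezone
+#     >>> _serialize_datetime(datetime(2025, 7, 11, 7, 10, 50, tzinfo=timezone.utc))
+#     '2025-07-11T07:10:50Z'
+#     >>> _timedelta_as_isostr(timedelta(days=4, hours=12, minutes=30, seconds=5))
+#     'P4DT12H30M05S'
+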
+ +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. 
+ :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + 
def __contains__(self, key: typing.Any) -> bool:
+        return key in self._data
+
+    def __getitem__(self, key: str) -> typing.Any:
+        return self._data.__getitem__(key)
+
+    def __setitem__(self, key: str, value: typing.Any) -> None:
+        self._data.__setitem__(key, value)
+
+    def __delitem__(self, key: str) -> None:
+        self._data.__delitem__(key)
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return self._data.__iter__()
+
+    def __len__(self) -> int:
+        return self._data.__len__()
+
+    def __ne__(self, other: typing.Any) -> bool:
+        return not self.__eq__(other)
+
+    def keys(self) -> typing.KeysView[str]:
+        """
+        :returns: a set-like object providing a view on D's keys
+        :rtype: ~typing.KeysView
+        """
+        return self._data.keys()
+
+    def values(self) -> typing.ValuesView[typing.Any]:
+        """
+        :returns: an object providing a view on D's values
+        :rtype: ~typing.ValuesView
+        """
+        return self._data.values()
+
+    def items(self) -> typing.ItemsView[str, typing.Any]:
+        """
+        :returns: a set-like object providing a view on D's items
+        :rtype: ~typing.ItemsView
+        """
+        return self._data.items()
+
+    def get(self, key: str, default: typing.Any = None) -> typing.Any:
+        """
+        Get the value for key if key is in the dictionary, else default.
+        :param str key: The key to look up.
+        :param any default: The value to return if key is not in the dictionary. Defaults to None.
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    @typing.overload
+    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ
+
+    @typing.overload
+    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs
+
+    @typing.overload
+    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Removes the specified key and returns the corresponding value.
+        :param str key: The key to pop.
+        :param any default: The value to return if key is not in the dictionary.
+        :returns: The value corresponding to the key.
+        :rtype: any
+        :raises KeyError: If key is not found and default is not given.
+        """
+        if default is _UNSET:
+            return self._data.pop(key)
+        return self._data.pop(key, default)
+
+    def popitem(self) -> typing.Tuple[str, typing.Any]:
+        """
+        Removes and returns a (key, value) pair.
+        :returns: The (key, value) pair.
+        :rtype: tuple
+        :raises KeyError: if D is empty.
+        """
+        return self._data.popitem()
+
+    def clear(self) -> None:
+        """
+        Remove all items from D.
+        """
+        self._data.clear()
+
+    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
+        """
+        Updates D from mapping/iterable E and F.
+        :param any args: Either a mapping object or an iterable of key-value pairs.
+        """
+        self._data.update(*args, **kwargs)
+
+    @typing.overload
+    def setdefault(self, key: str, default: None = None) -> None: ...
+
+    @typing.overload
+    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Same as calling D.get(k, d) and setting D[k]=d if k is not found.
+        :param str key: The key to look up.
+        :param any default: The value to set if key is not in the dictionary.
+        :returns: D[k] if k in D, else d.
+ :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = 
prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", 
{}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: 
typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if deserializer is bool:
+                return value.text == "true" if value.text else None
+        if deserializer is None:
+            return value
+        if deserializer in [int, float, bool]:
+            return deserializer(value)
+        if isinstance(deserializer, CaseInsensitiveEnumMeta):
+            try:
+                return deserializer(value)
+            except ValueError:
+                # for unknown value, return raw value
+                return value
+        if isinstance(deserializer, type) and issubclass(deserializer, Model):
+            return deserializer._deserialize(value, [])
+        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
+    except Exception as e:
+        raise DeserializationError() from e
+
+
+def _deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    if isinstance(value, PipelineResponse):
+        value = value.http_response.json()
+    if rf is None and format:
+        rf = _RestField(format=format)
+    if not isinstance(deserializer, functools.partial):
+        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
+    return _deserialize_with_callable(deserializer, value)
+
+
+def _failsafe_deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] 
= None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: 
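+    # Hedged usage sketch: the model names and the wire name "@odata.type"
+    # below are hypothetical, not taken from this package.
+    #
+    #     class Shape(Model):
+    #         kind: str = rest_discriminator(name="@odata.type")
+    #
+    #     class Circle(Shape, discriminator="circle"):
+    #         ...
+    #
+    # Model.__init_subclass__ registers each subclass in the base class's
+    # __mapping__ under its discriminator value, and Model._deserialize then
+    # routes an incoming payload to the matching subclass by that wire value.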
+ return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: 
typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/batch/azure-batch/client/_utils/serialization.py b/sdk/batch/azure-batch/client/_utils/serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/batch/azure-batch/client/_utils/serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
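+    # (i.e. "application/json", "text/json" and vendor types such as
+    # "application/vnd.contoso+json" all match the regexp below; the vendor
+    # type shown is an illustrative assumption, not one this file names)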
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+        """Decode data according to content-type.
+
+        Accepts a stream of data as well, but it will be loaded into memory at once for now.
+
+        If no content-type is given, the string version is returned (not bytes, not a stream).
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+        :return: The deserialized data.
+        :rtype: object
+        """
+        if hasattr(data, "read"):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding="utf-8-sig")
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+        # Remove Byte Order Mark if present in string
+        data_as_str = data_as_str.lstrip(_BOM)
+
+        if content_type is None:
+            return data
+
+        if cls.JSON_REGEXP.match(content_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
+        elif "xml" in (content_type or []):
+            try:
+
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
+                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
+                except NameError:
+                    pass
+
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError as err:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up with exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML nor JSON, failing")
+                raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT depend on any requests/aiohttp or other
+        specific implementation.
+        Headers will be tested for "content-type".
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    :rtype: str
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    :rtype: list
+    """
+    keys = _FLATTEN.split(attr_desc["key"])
+    return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    :rtype: str
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node.
+
+    :param str tag: The tag name
+    :param str prefix: The prefix
+    :param str ns: The namespace
+    :return: The XML node
+    :rtype: xml.etree.ElementTree.Element
+    """
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+class Model:
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+    def __ne__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
+        return not self.__eq__(other)
+
+    def __str__(self) -> str:
+        return str(self.__dict__)
+
+    @classmethod
+    def enable_additional_properties_sending(cls) -> None:
+        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+    @classmethod
+    def is_xml_model(cls) -> bool:
+        try:
+            cls._xml_map  # type: ignore
+        except AttributeError:
+            return False
+        return True
+
+    @classmethod
+    def _create_xml_node(cls):
+        """Create XML node.
+
+        :returns: The XML node
+        :rtype: xml.etree.ElementTree.Element
+        """
+        try:
+            xml_map = cls._xml_map  # type: ignore
+        except AttributeError:
+            xml_map = {}
+
+        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+        """Return the JSON that would be sent to server from this model.
+
+        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+        If you want XML serialization, you can pass the kwarg is_xml=True.
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, keep_readonly=keep_readonly, **kwargs
+        )
+
+    def as_dict(
+        self,
+        keep_readonly: bool = True,
+        key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+        **kwargs: Any
+    ) -> JSON:
+        """Return a dict that can be serialized using json.dump.
+
+        Advanced usage might optionally use a callback as parameter:
+
+        .. code::python
+
+            def my_key_transformer(key, attr_desc, value):
+                return key, value
+
+        Key is the attribute name used in Python. Attr_desc
+        is a dict of metadata. Currently contains 'type' with the
+        msrest type and 'key' with the RestAPI encoded key.
+        Value is the current value in this object.
+
+        The key (or list of keys) returned will be used to serialize the value.
+        If a list of keys is returned, this is considered a hierarchical
+        result dict.
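+
+        A hypothetical transformer that rewrites Python attribute names into
+        hyphenated keys (illustrative only, not part of the generated code):
+
+        .. code::python
+
+            def hyphen_key_transformer(key, attr_desc, value):
+                # e.g. "my_attr" -> "my-attr"
+                return key.replace("_", "-"), value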
+ + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
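+
+        For instance, with a hypothetical base class declaring
+        _subtype_map = {"kind": {"linux": "LinuxConfig"}}, a response
+        containing {"kind": "linux"} is classified as LinuxConfig.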
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. + + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + 
def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
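+                            # e.g. a child serialized as "{http://ns}Child" keeps its "{http://ns}"
+                            # part but is re-tagged with the attribute's local xml_name.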
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+        :rtype: str, int, float, bool, dict, list
+        """
+        if data is None:
+            raise ValueError("No value for given attribute")
+
+        try:
+            if data is CoreNull:
+                return None
+            if data_type in self.basic_types.values():
+                return self.serialize_basic(data, data_type, **kwargs)
+
+            if data_type in self.serialize_type:
+                return self.serialize_type[data_type](data, **kwargs)
+
+            # If dependencies is empty, try with current data class
+            # It has to be a subclass of Enum anyway
+            enum_type = self.dependencies.get(data_type, data.__class__)
+            if issubclass(enum_type, Enum):
+                return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.serialize_type:
+                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+        except (ValueError, TypeError) as err:
+            msg = "Unable to serialize value: {!r} as type: {!r}."
+            raise SerializationError(msg.format(data, data_type)) from err
+        return self._serialize(data, **kwargs)
+
+    @classmethod
+    def _get_custom_serializers(cls, data_type, **kwargs):  # pylint: disable=inconsistent-return-statements
+        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+        if custom_serializer:
+            return custom_serializer
+        if kwargs.get("is_xml", False):
+            return cls._xml_basic_types_serializers.get(data_type)
+
+    @classmethod
+    def serialize_basic(cls, data, data_type, **kwargs):
+        """Serialize basic builtin data type.
+        Serializes objects to str, int, float or bool.
+
+        Possible kwargs:
+        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+        - is_xml bool : If set, use xml_basic_types_serializers
+
+        :param obj data: Object to be serialized.
+        :param str data_type: Type of object in the iterable.
+        :rtype: str, int, float, bool
+        :return: serialized object
+        """
+        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+        if custom_serializer:
+            return custom_serializer(data)
+        if data_type == "str":
+            return cls.serialize_unicode(data)
+        return eval(data_type)(data)  # nosec # pylint: disable=eval-used
+
+    @classmethod
+    def serialize_unicode(cls, data):
+        """Special handling for serializing unicode strings in Py2.
+        Encode to UTF-8 if unicode, otherwise handle as a str.
+
+        :param str data: Object to be serialized.
+        :rtype: str
+        :return: serialized object
+        """
+        try:  # If I received an enum, return its value
+            return data.value
+        except AttributeError:
+            pass
+
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                # Don't change it, JSON and XML ElementTree are totally able
+                # to serialize correctly u'' strings
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    def serialize_iter(self, data, iter_type, div=None, **kwargs):
+        """Serialize iterable.
+
+        Supported kwargs:
+        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+          serialization_ctxt['type'] should be same as data_type.
+        - is_xml bool : If set, serialize as XML
+
+        :param list data: Object to be serialized.
+        :param str iter_type: Type of object in the iterable.
+        :param str div: If set, this str will be used to combine the elements
+         in the iterable into a combined string. Defaults to None.
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
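+
+    For example, a model whose _xml_map is {"name": "Pet", "ns": "http://example.org"}
+    (hypothetical values) yields "{http://example.org}Pet".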
+
+    :param dict internal_type: A model type
+    :rtype: str
+    :returns: The XML name, qualified with its namespace if one is set
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        # Iter and wrapped, should have found one node only (the wrap one)
+        if len(children) != 1:
+            raise DeserializationError(
+                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
+                    xml_name
+                )
+            )
+        return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an itertype, we should have found one element only or empty
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' where it was not expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer:
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
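+
+    A minimal standalone sketch (assuming a generated model class named Pet,
+    registered under its class name):
+
+    .. code::python
+
+        deserializer = Deserializer({"Pet": Pet})
+        pet = deserializer("Pet", '{"name": "Fido"}', content_type="application/json")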
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+        :return: Deserialized duration
+        :rtype: TimeDelta
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Time must have only digits and separators. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize Unix time (an integer count of seconds since the epoch)
+        into Datetime object.
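+
+        Behavior sketch (per the implementation below, the value is interpreted
+        as seconds since the Unix epoch, in UTC):
+
+        >>> Deserializer.deserialize_unix(0).isoformat()
+        '1970-01-01T00:00:00+00:00'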
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/batch/azure-batch/client/_utils/utils.py b/sdk/batch/azure-batch/client/_utils/utils.py new file mode 100644 index 000000000000..927adb7c8ae2 --- /dev/null +++ b/sdk/batch/azure-batch/client/_utils/utils.py @@ -0,0 +1,57 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Generic, Optional, TYPE_CHECKING, TypeVar + +from azure.core import MatchConditions + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/sdk/batch/azure-batch/client/_version.py b/sdk/batch/azure-batch/client/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/batch/azure-batch/client/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
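+#
+# Behavior sketch for the etag helpers in _utils/utils.py above, read directly
+# from that code (not additional API surface):
+#
+#     quote_etag('abc')                                      -> '"abc"'
+#     quote_etag('W/"abc"')                                  -> 'W/"abc"'
+#     prep_if_match('abc', MatchConditions.IfNotModified)    -> '"abc"'
+#     prep_if_match(None, MatchConditions.IfPresent)         -> '*'
+#     prep_if_none_match('abc', MatchConditions.IfModified)  -> '"abc"'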
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/batch/azure-batch/client/aio/__init__.py b/sdk/batch/azure-batch/client/aio/__init__.py new file mode 100644 index 000000000000..b1e0d367b042 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import BatchClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/aio/_client.py b/sdk/batch/azure-batch/client/aio/_client.py new file mode 100644 index 000000000000..0f923f668086 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_client.py @@ -0,0 +1,103 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import BatchClientConfiguration +from ._operations._operations import _BatchClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class BatchClient(_BatchClientOperationsMixin): + """BatchClient. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = BatchClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + kwargs["request_id_header_name"] = "client-request-id" + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/batch/azure-batch/client/aio/_configuration.py b/sdk/batch/azure-batch/client/aio/_configuration.py new file mode 100644 index 000000000000..296a4e20cbe0 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
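+#
+# Usage sketch for the async client defined in _client.py above. Illustrative
+# only: it assumes azure-identity is installed, uses this diff's package
+# layout for the import, and the endpoint and printed field are examples.
+#
+#     import asyncio
+#     from azure.identity.aio import DefaultAzureCredential
+#     from client.aio import BatchClient
+#
+#     async def main():
+#         async with DefaultAzureCredential() as credential:
+#             async with BatchClient(
+#                 "https://myaccount.eastus2.batch.azure.com", credential
+#             ) as client:
+#                 async for pool in client.list_pools(max_results=10):
+#                     print(pool.id)
+#
+#     asyncio.run(main())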
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class BatchClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BatchClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-07-01.20.0") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://batch.core.windows.net//.default"]) + kwargs.setdefault("sdk_moniker", "batch/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/batch/azure-batch/client/aio/_operations/__init__.py b/sdk/batch/azure-batch/client/aio/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
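+#
+# Configuration sketch: per the _configure method in _configuration.py above,
+# each pipeline policy (and the credential scopes) can be overridden with
+# keyword arguments at client construction time, for example:
+#
+#     from azure.core.pipeline import policies
+#
+#     client = BatchClient(
+#         endpoint,
+#         credential,
+#         retry_policy=policies.AsyncRetryPolicy(retry_total=3),
+#         credential_scopes=["https://batch.core.windows.net//.default"],
+#     )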
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/aio/_operations/_operations.py b/sdk/batch/azure-batch/client/aio/_operations/_operations.py new file mode 100644 index 000000000000..fc63bc74ca7f --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/_operations.py @@ -0,0 +1,8307 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +import datetime +import json +from typing import Any, AsyncIterator, Callable, Dict, List, Optional, TypeVar +import urllib.parse + +from azure.core import AsyncPipelineClient, MatchConditions +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ....azure.batch import models as _azure_batch_models5 +from ..._operations._operations import ( + build_batch_cancel_certificate_deletion_request, + build_batch_create_certificate_request, + build_batch_create_job_request, + build_batch_create_job_schedule_request, + build_batch_create_node_user_request, + build_batch_create_pool_request, + build_batch_create_task_collection_request, + build_batch_create_task_request, + build_batch_deallocate_node_request, + build_batch_delete_certificate_request, + build_batch_delete_job_request, + build_batch_delete_job_schedule_request, + build_batch_delete_node_file_request, + build_batch_delete_node_user_request, + build_batch_delete_pool_request, + build_batch_delete_task_file_request, + build_batch_delete_task_request, + build_batch_disable_job_request, + build_batch_disable_job_schedule_request, + build_batch_disable_node_scheduling_request, + build_batch_disable_pool_auto_scale_request, + build_batch_enable_job_request, + build_batch_enable_job_schedule_request, + build_batch_enable_node_scheduling_request, + build_batch_enable_pool_auto_scale_request, + build_batch_evaluate_pool_auto_scale_request, + build_batch_get_application_request, + build_batch_get_certificate_request, + build_batch_get_job_request, + build_batch_get_job_schedule_request, + build_batch_get_job_task_counts_request, + build_batch_get_node_extension_request, + 
build_batch_get_node_file_properties_request,
+    build_batch_get_node_file_request,
+    build_batch_get_node_remote_login_settings_request,
+    build_batch_get_node_request,
+    build_batch_get_pool_request,
+    build_batch_get_task_file_properties_request,
+    build_batch_get_task_file_request,
+    build_batch_get_task_request,
+    build_batch_job_schedule_exists_request,
+    build_batch_list_applications_request,
+    build_batch_list_certificates_request,
+    build_batch_list_job_preparation_and_release_task_status_request,
+    build_batch_list_job_schedules_request,
+    build_batch_list_jobs_from_schedule_request,
+    build_batch_list_jobs_request,
+    build_batch_list_node_extensions_request,
+    build_batch_list_node_files_request,
+    build_batch_list_nodes_request,
+    build_batch_list_pool_node_counts_request,
+    build_batch_list_pool_usage_metrics_request,
+    build_batch_list_pools_request,
+    build_batch_list_sub_tasks_request,
+    build_batch_list_supported_images_request,
+    build_batch_list_task_files_request,
+    build_batch_list_tasks_request,
+    build_batch_pool_exists_request,
+    build_batch_reactivate_task_request,
+    build_batch_reboot_node_request,
+    build_batch_reimage_node_request,
+    build_batch_remove_nodes_request,
+    build_batch_replace_job_request,
+    build_batch_replace_job_schedule_request,
+    build_batch_replace_node_user_request,
+    build_batch_replace_pool_properties_request,
+    build_batch_replace_task_request,
+    build_batch_resize_pool_request,
+    build_batch_start_node_request,
+    build_batch_stop_pool_resize_request,
+    build_batch_terminate_job_request,
+    build_batch_terminate_job_schedule_request,
+    build_batch_terminate_task_request,
+    build_batch_update_job_request,
+    build_batch_update_job_schedule_request,
+    build_batch_update_pool_request,
+    build_batch_upload_node_logs_request,
+)
+from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
+from ..._utils.utils import ClientMixinABC
+from .._configuration import BatchClientConfiguration
+
+# The generated method bodies below also reference the models module as
+# `_azure_batch_models3`; alias it to the imported namespace so both names
+# resolve to the same azure.batch models.
+_azure_batch_models3 = _azure_batch_models5
+
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class _BatchClientOperationsMixin(  # pylint: disable=too-many-public-methods
+    ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], BatchClientConfiguration]
+):
+
+    @distributed_trace
+    def list_applications(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchApplication"]:
+        """Lists all of the applications available in the specified Account.
+
+        This operation returns only Applications and versions that are available for
+        use on Compute Nodes; that is, that can be used in an Application Package reference. For
+        administrator information about applications and versions that are not yet
+        available to Compute Nodes, use the Azure portal or the Azure Resource Manager
+        API.
+
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         applications can be returned. Default value is None.
+        :paramtype max_results: int
+        :return: An iterator like instance of BatchApplication
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchApplication]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models5.BatchApplication]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_applications_request(
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models5.BatchApplication], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace_async
+    async def get_application(
+        self,
+        application_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> _azure_batch_models5.BatchApplication:
+        """Gets information about the specified Application.
+
+        This operation returns only Applications and versions that are available for
+        use on Compute Nodes; that is, that can be used in an Application Package reference. For
+        administrator information about Applications and versions that are not yet
+        available to Compute Nodes, use the Azure portal or the Azure Resource Manager
+        API.
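+
+        Usage sketch (the application ID and printed fields are illustrative):
+
+        >>> app = await client.get_application("my-app-id")
+        >>> print(app.id, app.versions)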
+ + :param application_id: The ID of the Application. Required. + :type application_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchApplication. The BatchApplication is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchApplication + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchApplication] = kwargs.pop("cls", None) + + _request = build_batch_get_application_request( + application_id=application_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchApplication, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_pool_usage_metrics( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchPoolUsageMetrics"]: + """Lists the usage metrics, aggregated by Pool across individual time intervals, + for the specified Account. 
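+
+        Usage sketch (the pool ID in the filter and the printed fields are
+        illustrative):
+
+        >>> async for metrics in client.list_pool_usage_metrics(filter="poolId eq 'my-pool'"):
+        ...     print(metrics.pool_id, metrics.total_core_hours)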
+
+        If you do not specify a $filter clause including a poolId, the response
+        includes all Pools that existed in the Account in the time range of the
+        returned aggregation intervals. If you do not specify a $filter clause
+        including a startTime or endTime these filters default to the start and end
+        times of the last aggregation interval currently available; that is, only the
+        last aggregation interval is returned.
+
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         results can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword starttime: The earliest time from which to include metrics. This must be at least two
+         and a half hours before the current time. If not specified this defaults to the
+         start time of the last aggregation interval currently available. Default value is None.
+        :paramtype starttime: ~datetime.datetime
+        :keyword endtime: The latest time until which to include metrics. This must be at least two
+         hours before the current time. If not specified this defaults to the end time of the
+         last aggregation interval currently available. Default value is None.
+        :paramtype endtime: ~datetime.datetime
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics
+         `_.
+         Default value is None.
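+         For example (pool ID illustrative): ``poolId eq 'my-pool'``.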
+ :paramtype filter: str + :return: An iterator like instance of BatchPoolUsageMetrics + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchPoolUsageMetrics] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchPoolUsageMetrics]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_usage_metrics_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + starttime=starttime, + endtime=endtime, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchPoolUsageMetrics], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_pool( + self, + pool: _azure_batch_models5.BatchPoolCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Pool to the specified Account. + + When naming Pools, avoid including sensitive information such as user names or + secret project names. This information may appear in telemetry logs accessible + to Microsoft Support engineers. + + :param pool: The Pool to be created. Required. 
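+         A minimal ``BatchPoolCreateContent`` carries at least an ``id``, a
+         ``vm_size``, one pool configuration block (for example
+         ``virtual_machine_configuration``) and a node count such as
+         ``target_dedicated_nodes``; these field names are assumptions taken from
+         this SDK's models, not an exhaustive contract.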
+        :type pool: ~azure.batch.models.BatchPoolCreateContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_batch_create_pool_request(
+            timeout=timeout,
+            ocpdate=ocpdate,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace
+    def list_pools(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        select: Optional[List[str]] = None,
+        expand: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchPool"]:
+        """Lists all of the Pools in the specified Account.
+
+        Lists all of the Pools in the specified Account.
+
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchPool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchPool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchPool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pools_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchPool], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise 
HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def delete_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Pool from the specified Account. + + When you request that a Pool be deleted, the following actions occur: the Pool + state is set to deleting; any ongoing resize operation on the Pool are stopped; + the Batch service starts resizing the Pool to zero Compute Nodes; any Tasks + running on existing Compute Nodes are terminated and requeued (as if a resize + Pool operation had been requested with the default requeue option); finally, + the Pool is removed from the system. Because running Tasks are requeued, the + user can rerun these Tasks by updating their Job to target a different Pool. + The Tasks can then run on the new Pool. If you want to override the requeue + behavior, then you should call resize Pool explicitly to shrink the Pool to + zero size before deleting the Pool. If you call an Update, Patch or Delete API + on a Pool in the deleting state, it will fail with HTTP status code 409 with + error code PoolBeingDeleted. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
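+         For example, passing ``etag=pool.e_tag`` (attribute name assumed) with
+         ``match_condition=MatchConditions.IfNotModified`` deletes the Pool only
+         if it has not changed since it was read.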
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def pool_exists( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Gets basic properties of a Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_pool_exists_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def get_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchPool: + """Gets information about the specified Pool. 
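+
+        Usage sketch (pool ID and fields illustrative; note that ``pool_exists``
+        above maps a 200 response to ``True`` and a 404 to ``False``):
+
+        >>> pool = await client.get_pool("my-pool", select=["id", "state"])
+        >>> pool.state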
+ + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchPool. The BatchPool is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchPool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchPool] = kwargs.pop("cls", None) + + _request = build_batch_get_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchPool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def update_pool( + self, + pool_id: str, + pool: _azure_batch_models5.BatchPoolUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This only replaces the Pool properties specified in the request. For example, + if the Pool has a StartTask associated with it, and a request does not specify + a StartTask element, then the Pool keeps the existing StartTask. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param pool: The pool properties to update. Required. + :type pool: ~azure.batch.models.BatchPoolUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
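+         When the condition fails, the service's 412 response is raised as
+         ``ResourceModifiedError`` for ``IfNotModified``, ``ResourceNotFoundError``
+         for ``IfPresent``, or ``ResourceExistsError`` for ``IfMissing``, per the
+         ``error_map`` set up in the method body below.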
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_pool_auto_scale( + self, pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> None: + """Disables automatic scaling for a Pool. + + Disables automatic scaling for a Pool. + + :param pool_id: The ID of the Pool on which to disable automatic scaling. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. 
Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_disable_pool_auto_scale_request(
+            pool_id=pool_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def enable_pool_auto_scale(
+        self,
+        pool_id: str,
+        content: _azure_batch_models5.BatchPoolEnableAutoScaleContent,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """Enables automatic scaling for a Pool.
+
+        You cannot enable automatic scaling on a Pool if a resize operation is in
+        progress on the Pool. If automatic scaling of the Pool is currently disabled,
+        you must specify a valid autoscale formula as part of the request. If automatic
+        scaling of the Pool is already enabled, you may specify a new autoscale formula
+        and/or a new evaluation interval. You cannot call this API for the same Pool
+        more than once every 30 seconds.
+
+        :param pool_id: The ID of the Pool on which to enable automatic scaling. Required.
+        :type pool_id: str
+        :param content: The options to use for enabling automatic scaling. Required.
+        :type content: ~azure.batch.models.BatchPoolEnableAutoScaleContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued.
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_enable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", 
response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def evaluate_pool_auto_scale( + self, + pool_id: str, + content: _azure_batch_models5.BatchPoolEvaluateAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.AutoScaleRun: + """Gets the result of evaluating an automatic scaling formula on the Pool. + + This API is primarily for validating an autoscale formula, as it simply returns + the result without applying the formula to the Pool. The Pool must have auto + scaling enabled in order to evaluate a formula. + + :param pool_id: The ID of the Pool on which to evaluate the automatic scaling formula. + Required. + :type pool_id: str + :param content: The options to use for evaluating the automatic scaling formula. Required. + :type content: ~azure.batch.models.BatchPoolEvaluateAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: AutoScaleRun. The AutoScaleRun is compatible with MutableMapping + :rtype: ~azure.batch.models.AutoScaleRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models5.AutoScaleRun] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_evaluate_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + 
        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_azure_batch_models5.AutoScaleRun, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace_async
+    async def resize_pool(
+        self,
+        pool_id: str,
+        content: _azure_batch_models5.BatchPoolResizeContent,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """Changes the number of Compute Nodes that are assigned to a Pool.
+
+        You can only resize a Pool when its allocation state is steady. If the Pool is
+        already resizing, the request fails with status code 409. When you resize a
+        Pool, the Pool's allocation state changes from steady to resizing. You cannot
+        resize Pools which are configured for automatic scaling. If you try to do this,
+        the Batch service returns an error 409. If you resize a Pool downwards, the
+        Batch service chooses which Compute Nodes to remove. To remove specific Compute
+        Nodes, use the Pool remove Compute Nodes API instead.
+
+        :param pool_id: The ID of the Pool to resize. Required.
+        :type pool_id: str
+        :param content: The options to use for resizing the pool. Required.
+        :type content: ~azure.batch.models.BatchPoolResizeContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API directly.
+         Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword if_modified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has been modified since the specified time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has not been modified since the specified time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+         is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_resize_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def stop_pool_resize( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Stops an ongoing resize operation on the Pool. + + This does not restore the Pool to its previous state before the resize + operation: it only stops any further changes being made, and the Pool maintains + its current state. After stopping, the Pool stabilizes at the number of Compute + Nodes it was at when the stop operation was done. 
During the stop operation,
+        the Pool allocation state changes first to stopping and then to steady. A
+        resize operation need not be an explicit resize Pool request; this API can also
+        be used to halt the initial sizing of the Pool when it is created.
+
+        :param pool_id: The ID of the Pool whose resizing you want to stop. Required.
+        :type pool_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API directly.
+         Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword if_modified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has been modified since the specified time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service
+         has not been modified since the specified time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value
+         is None.
+        :paramtype etag: str
+        :keyword match_condition: The match condition to use upon the etag. Default value is None.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_stop_pool_resize_request(
+            pool_id=pool_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            etag=etag,
+            match_condition=match_condition,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"]
= self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_pool_properties( + self, + pool_id: str, + pool: _azure_batch_models5.BatchPoolReplaceContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This fully replaces all the updatable properties of the Pool. For example, if + the Pool has a StartTask associated with it and if StartTask is not specified + with this request, then the Batch service will remove the existing StartTask. + + :param pool_id: The ID of the Pool to update. Required. + :type pool_id: str + :param pool: The options to use for replacing properties on the pool. Required. + :type pool: ~azure.batch.models.BatchPoolReplaceContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_pool_properties_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = 
self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def remove_nodes( + self, + pool_id: str, + content: _azure_batch_models5.BatchNodeRemoveContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Removes Compute Nodes from the specified Pool. + + This operation can only run when the allocation state of the Pool is steady. + When this operation runs, the allocation state changes from steady to resizing. + Each request may remove up to 100 nodes. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for removing the node. Required. + :type content: ~azure.batch.models.BatchNodeRemoveContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_remove_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_supported_images( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchSupportedImage"]: + """Lists all Virtual Machine Images supported by the Azure Batch service. + + Lists all Virtual Machine Images supported by the Azure Batch service. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API directly.
+         Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of
+         1000 results can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images
+         <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images>`_.
+         Default value is None.
+        :paramtype filter: str
+        :return: An iterator-like instance of BatchSupportedImage
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchSupportedImage]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models5.BatchSupportedImage]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_supported_images_request(
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    filter=filter,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models5.BatchSupportedImage], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def list_pool_node_counts(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchPoolNodeCounts"]:
+        """Gets the number of Compute Nodes in each state, grouped by Pool. Note that the
+        numbers returned may not always be up to date. If you need exact node counts,
+        use a list query.
+
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API directly.
+         Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of
+         1000 results can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images
+         <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images>`_.
+         Default value is None.
+        :paramtype filter: str
+        :return: An iterator-like instance of BatchPoolNodeCounts
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchPoolNodeCounts]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models5.BatchPoolNodeCounts]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_pool_node_counts_request(
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    filter=filter,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models5.BatchPoolNodeCounts], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)
+
+        async
def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def delete_job( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job. + + Deleting a Job also deletes all Tasks that are part of that Job, and all Job + statistics. This also overrides the retention period for Task data; that is, if + the Job contains Tasks which are still retained on Compute Nodes, the Batch + services deletes those Tasks' working directories and all their contents. When + a Delete Job request is received, the Batch service sets the Job to the + deleting state. All update operations on a Job that is in deleting state will + fail with status code 409 (Conflict), with additional information indicating + that the Job is being deleted. + + :param job_id: The ID of the Job to delete. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will delete the Job even if the corresponding nodes have + not fully processed the deletion. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_job( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchJob: + """Gets information about the specified Job. + + Gets information about the specified Job. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchJob. The BatchJob is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchJob] = kwargs.pop("cls", None) + + _request = build_batch_get_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchJob, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore 
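+        # A minimal call sketch (hypothetical endpoint/credential/job-id values, not
+        # part of the generated module), assuming the async BatchClient that exposes
+        # this operations mixin:
+        #
+        #     async with BatchClient(endpoint, credential) as client:
+        #         job = await client.get_job("my-job-id")
+        #         print(job.id, job.state)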
+ + return deserialized # type: ignore + + @distributed_trace_async + async def update_job( + self, + job_id: str, + job: _azure_batch_models5.BatchJobUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job. + + This replaces only the Job properties specified in the request. For example, if + the Job has constraints, and a request does not specify the constraints + element, then the Job keeps the existing constraints. + + :param job_id: The ID of the Job whose properties you want to update. Required. + :type job_id: str + :param job: The options to use for updating the Job. Required. + :type job: ~azure.batch.models.BatchJobUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_job( + self, + job_id: str, + job: _azure_batch_models5.BatchJob, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job. + + This fully replaces all the updatable properties of the Job. For example, if + the Job has constraints associated with it and if constraints is not specified + with this request, then the Batch service will remove the existing constraints. + + :param job_id: The ID of the Job whose properties you want to update. Required. 
+ :type job_id: str + :param job: A job with updated properties. Required. + :type job: ~azure.batch.models.BatchJob + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, 
model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_job( + self, + job_id: str, + content: _azure_batch_models5.BatchJobDisableContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables the specified Job, preventing new Tasks from running. + + The Batch Service immediately moves the Job to the disabling state. Batch then + uses the disableTasks parameter to determine what to do with the currently + running Tasks of the Job. The Job remains in the disabling state until the + disable operation is completed and all Tasks have been dealt with according to + the disableTasks option; the Job then moves to the disabled state. No new Tasks + are started under the Job until it moves back to active state. If you try to + disable a Job that is in any state other than active, disabling, or disabled, + the request fails with status code 409. + + :param job_id: The ID of the Job to disable. Required. + :type job_id: str + :param content: The options to use for disabling the Job. Required. + :type content: ~azure.batch.models.BatchJobDisableContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        if match_condition == MatchConditions.IfNotModified:
+            error_map[412] = ResourceModifiedError
+        elif match_condition == MatchConditions.IfPresent:
+            error_map[412] = ResourceNotFoundError
+        elif match_condition == MatchConditions.IfMissing:
+            error_map[412] = ResourceExistsError
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_batch_disable_job_request(
+            job_id=job_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            etag=etag,
+            match_condition=match_condition,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def enable_job(
+        self,
+        job_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional[MatchConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        """Enables the specified Job, allowing new Tasks to run.
+
+        When you call this API, the Batch service sets a disabled Job to the enabling
+        state. After this operation is completed, the Job moves to the active
+        state, and scheduling of new Tasks under the Job resumes. The Batch service
+        does not allow a Task to remain in the active state for more than 180 days.
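
Assuming the `client` from the first sketch, disabling a Job looks like this. The `disable_tasks` field name is inferred from the `disableTasks` option the docstring above mentions and is not confirmed by this diff.

```python
# Inside the `async with` block of the first sketch.
# Moves the Job to `disabling`; running Tasks are handled according to
# disable_tasks ("requeue", "terminate", or "wait" in the REST API).
await client.disable_job("job-1", models.BatchJobDisableContent(disable_tasks="requeue"))
```
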
+ Therefore, if you enable a Job containing active Tasks which were added more + than 180 days ago, those Tasks will not run. + + :param job_id: The ID of the Job to enable. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", 
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def terminate_job( + self, + job_id: str, + parameters: Optional[_azure_batch_models5.BatchJobTerminateContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Job, marking it as completed. + + When a Terminate Job request is received, the Batch service sets the Job to the + terminating state. The Batch service then terminates any running Tasks + associated with the Job and runs any required Job release Tasks. Then the Job + moves into the completed state. If there are any Tasks in the Job in the active + state, they will remain in the active state. Once a Job is terminated, new + Tasks cannot be added and any remaining active Tasks will not be scheduled. + + :param job_id: The ID of the Job to terminate. Required. + :type job_id: str + :param parameters: The options to use for terminating the Job. Default value is None. + :type parameters: ~azure.batch.models.BatchJobTerminateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the Job even if the corresponding nodes have + not fully processed the termination. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_terminate_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def create_job( + self, + job: _azure_batch_models5.BatchJobCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job to the specified Account. + + The Batch service supports two ways to control the work done as part of a Job. + In the first approach, the user specifies a Job Manager Task. The Batch service + launches this Task when it is ready to start the Job. The Job Manager Task + controls all other Tasks that run under this Job, by using the Task APIs. In + the second approach, the user directly controls the execution of Tasks under an + active Job, by using the Task APIs. 
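
`terminate_job` takes an optional body plus the `force` flag documented above. A sketch with the earlier `client`; the `termination_reason` field name is an assumption modeled on the REST `terminateReason` property:

```python
# Inside the `async with` block of the first sketch.
await client.terminate_job(
    "job-1",
    models.BatchJobTerminateContent(termination_reason="UserCancelled"),  # assumed field
    force=True,  # terminate even if nodes have not fully processed the termination
)
```
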
Also note: when naming Jobs, avoid + including sensitive information such as user names or secret project names. + This information may appear in telemetry logs accessible to Microsoft Support + engineers. + + :param job: The Job to be created. Required. + :type job: ~azure.batch.models.BatchJobCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_jobs( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchJob"]: + """Lists all of the Jobs in the specified Account. + + Lists all of the Jobs in the specified Account. 
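
A minimal creation sketch with the earlier `client`. The `id` and `pool_info` fields and the `BatchPoolInfo` model are assumed names that do not appear in this diff:

```python
# Inside the `async with` block of the first sketch.
await client.create_job(
    models.BatchJobCreateContent(
        id="job-1",                                         # assumed field
        pool_info=models.BatchPoolInfo(pool_id="pool-1"),   # assumed model/field
    )
)
```
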
+ + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs + `_. Default + value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, 
**kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def list_jobs_from_schedule(
+        self,
+        job_schedule_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        select: Optional[List[str]] = None,
+        expand: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchJob"]:
+        """Lists the Jobs that have been created under the specified Job Schedule.
+
+        Lists the Jobs that have been created under the specified Job Schedule.
+
+        :param job_schedule_id: The ID of the Job Schedule from which you want to get a list of Jobs.
+         Required.
+        :type job_schedule_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         Jobs can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule
+         <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule>`_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword select: An OData $select clause. Default value is None.
+        :paramtype select: list[str]
+        :keyword expand: An OData $expand clause. Default value is None.
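
The pager returned by `list_jobs` follows `odata.nextLink` internally (see `extract_data`/`get_next` above), so callers just iterate. A sketch with the earlier `client`:

```python
# Inside the `async with` block of the first sketch.
async for job in client.list_jobs(filter="state eq 'active'", max_results=100):
    print(job.id)  # `id` is an assumed BatchJob property
```
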
+ :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_from_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_job_preparation_and_release_task_status( # pylint: disable=name-too-long + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus"]: + """Lists the execution status of the Job Preparation and Job Release Task for the + specified Job across the Compute Nodes where the Job has run. 
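
The schedule-scoped listing above has the same paging shape; a brief sketch, again reusing the earlier `client`:

```python
# Inside the `async with` block of the first sketch.
async for job in client.list_jobs_from_schedule("schedule-1", select=["id", "state"]):
    print(job.id, job.state)  # assumed BatchJob properties
```
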
+ + This API returns the Job Preparation and Job Release Task status on all Compute + Nodes that have run the Job Preparation or Job Release Task. This includes + Compute Nodes which have since been removed from the Pool. If this API is + invoked on a Job which has no Job Preparation or Job Release Task, the Batch + service returns HTTP status code 409 (Conflict) with an error code of + JobPreparationTaskNotSpecified. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchJobPreparationAndReleaseTaskStatus + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchJobPreparationAndReleaseTaskStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_preparation_and_release_task_status_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): 
+ deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_job_task_counts( + self, job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> _azure_batch_models5.BatchTaskCountsResult: + """Gets the Task counts for the specified Job. + + Task counts provide a count of the Tasks by active, running or completed Task + state, and a count of Tasks which succeeded or failed. Tasks in the preparing + state are counted as running. Note that the numbers returned may not always be + up to date. If you need exact task counts, use a list query. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskCountsResult. 
The BatchTaskCountsResult is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchTaskCountsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchTaskCountsResult] = kwargs.pop("cls", None) + + _request = build_batch_get_job_task_counts_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTaskCountsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def create_certificate( + self, + certificate: _azure_batch_models5.BatchCertificate, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Certificate to the specified Account. + + Creates a Certificate to the specified Account. + + :param certificate: The Certificate to be created. Required. + :type certificate: ~azure.batch.models.BatchCertificate + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
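
Given the eventual-consistency note in the Task-counts docstring above, a reading sketch with the earlier `client`; the `task_counts` property and its fields are assumptions based on the model name in this diff:

```python
# Inside the `async with` block of the first sketch.
# Counts may lag; fall back to a list query when exact numbers matter.
counts = await client.get_job_task_counts("job-1")
print(counts.task_counts.active, counts.task_counts.failed)  # assumed fields
```
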
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(certificate, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_certificate_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_certificates( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchCertificate"]: + """Lists all of the Certificates that have been added to the specified Account. + + Lists all of the Certificates that have been added to the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. 
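
A hypothetical certificate upload with the earlier `client`. Every `BatchCertificate` field below is an assumption, since the model body is not part of this diff:

```python
# Inside the `async with` block of the first sketch.
import base64

with open("my-cert.cer", "rb") as f:
    encoded = base64.b64encode(f.read()).decode()

await client.create_certificate(
    models.BatchCertificate(
        thumbprint="0123456789abcdef0123456789abcdef01234567",  # placeholder
        thumbprint_algorithm="sha1",
        data=encoded,
        certificate_format="cer",
    )
)
```
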
For more information on constructing this filter, see + `https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-certificates + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchCertificate + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchCertificate] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchCertificate]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_certificates_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchCertificate], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def cancel_certificate_deletion( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Cancels a failed deletion of a Certificate from the specified Account. 
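
And the matching call to the certificate listing above, filtering server-side with the documented OData `$filter`:

```python
# Inside the `async with` block of the first sketch.
async for cert in client.list_certificates(filter="state eq 'deletefailed'"):
    print(cert.thumbprint)  # assumed BatchCertificate property
```
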
+ + If you try to delete a Certificate that is being used by a Pool or Compute + Node, the status of the Certificate changes to deleteFailed. If you decide that + you want to continue using the Certificate, you can use this operation to set + the status of the Certificate back to active. If you intend to delete the + Certificate, you do not need to run this operation after the deletion failed. + You must make sure that the Certificate is not being used by any resources, and + then you can try again to delete the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate being deleted. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_cancel_certificate_deletion_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete_certificate( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deletes 
a Certificate from the specified Account. + + You cannot delete a Certificate if a resource (Pool or Compute Node) is using + it. Before you can delete a Certificate, you must therefore make sure that the + Certificate is not associated with any existing Pools, the Certificate is not + installed on any Nodes (even if you remove a Certificate from a Pool, it is not + removed from existing Compute Nodes in that Pool until they restart), and no + running Tasks depend on the Certificate. If you try to delete a Certificate + that is in use, the deletion fails. The Certificate status changes to + deleteFailed. You can use Cancel Delete Certificate to set the status back to + active if you decide that you want to continue using the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to be deleted. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_certificate( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: 
Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchCertificate: + """Gets information about the specified Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to get. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchCertificate. The BatchCertificate is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchCertificate + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchCertificate] = kwargs.pop("cls", None) + + _request = build_batch_get_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchCertificate, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def 
job_schedule_exists( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Checks whether the specified Job Schedule exists. + + Checks whether the specified Job Schedule exists. + + :param job_schedule_id: The ID of the Job Schedule that you want to check. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None.
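+
+ For example, a minimal sketch of a conditional existence check; the client
+ object, the Job Schedule ID, and the previously fetched schedule are
+ hypothetical::
+
+     # proceed only if the schedule is unchanged since it was fetched
+     exists = await client.job_schedule_exists(
+         "my-schedule",
+         etag=schedule.e_tag,
+         match_condition=MatchConditions.IfNotModified,
+     )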
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_job_schedule_exists_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def delete_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job Schedule from the specified Account. + + When you delete a Job Schedule, this also deletes all Jobs and Tasks under that + schedule. When Tasks are deleted, all the files in their working directories on + the Compute Nodes are also deleted (the retention period is ignored). The Job + Schedule statistics are no longer accessible once the Job Schedule is deleted, + though they are still counted towards Account lifetime statistics. + + :param job_schedule_id: The ID of the Job Schedule to delete. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. 
The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will delete the JobSchedule even if the corresponding nodes + have not fully processed the deletion. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + 
return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchJobSchedule: + """Gets information about the specified Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to get. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchJobSchedule. 
The BatchJobSchedule is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchJobSchedule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchJobSchedule] = kwargs.pop("cls", None) + + _request = build_batch_get_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchJobSchedule, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def update_job_schedule( + self, + job_schedule_id: str, + job_schedule: _azure_batch_models5.BatchJobScheduleUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This replaces only the Job Schedule properties specified in the request. For + example, if the schedule property is not specified with this request, then the + Batch service will keep the existing schedule. 
Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: The options to use for updating the Job Schedule. Required. + :type job_schedule: ~azure.batch.models.BatchJobScheduleUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, 
**kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_job_schedule( + self, + job_schedule_id: str, + job_schedule: _azure_batch_models5.BatchJobSchedule, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This fully replaces all the updatable properties of the Job Schedule. For + example, if the schedule property is not specified with this request, then the + Batch service will remove the existing schedule. Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: A Job Schedule with updated properties. Required. + :type job_schedule: ~azure.batch.models.BatchJobSchedule + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
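+
+ A minimal fetch-modify-replace sketch with optimistic concurrency; the client
+ object and the Job Schedule ID are hypothetical::
+
+     schedule = await client.get_job_schedule("my-schedule")
+     schedule.schedule.recurrence_interval = datetime.timedelta(hours=2)
+     await client.replace_job_schedule(
+         "my-schedule",
+         schedule,
+         etag=schedule.e_tag,  # raise instead of overwriting a concurrent change
+         match_condition=MatchConditions.IfNotModified,
+     )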
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables a Job Schedule. + + No new Jobs will be created until the Job Schedule is enabled again. + + :param job_schedule_id: The ID of the Job Schedule to disable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", 
response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def enable_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables a Job Schedule. + + Enables a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to enable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
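+
+ For example, a sketch of pausing and later resuming a schedule; the client
+ object and the Job Schedule ID are hypothetical::
+
+     await client.disable_job_schedule("my-schedule")  # no new Jobs are created
+     await client.enable_job_schedule("my-schedule")   # scheduling resumes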
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def terminate_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates a Job Schedule. + + Terminates a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to terminate. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None.
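+
+ For example, a force-termination sketch; the client object and the Job
+ Schedule ID are hypothetical::
+
+     await client.terminate_job_schedule("my-schedule", force=True)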
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the JobSchedule even if the corresponding + nodes have not fully processed the termination. The default value is false. Default value is + None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, 
response_headers) # type: ignore + + @distributed_trace_async + async def create_job_schedule( + self, + job_schedule: _azure_batch_models5.BatchJobScheduleCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job Schedule in the specified Account. + + Creates a Job Schedule in the specified Account. + + :param job_schedule: The Job Schedule to be created. Required. + :type job_schedule: ~azure.batch.models.BatchJobScheduleCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_schedule_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_job_schedules( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] =
None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchJobSchedule"]: + """Lists all of the Job Schedules in the specified Account. + + Lists all of the Job Schedules in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + Job Schedules can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules + <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules>`_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator-like instance of BatchJobSchedule + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchJobSchedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJobSchedule]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_schedules_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJobSchedule], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)
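+
+ # Paging sketch: AsyncItemPaged (returned below) drives get_next to fetch one
+ # page at a time -- re-issuing the request against odata.nextLink until it is
+ # exhausted -- and extract_data (above) to split each page into a continuation
+ # token plus an AsyncList of deserialized items. A caller simply iterates,
+ # e.g. (hypothetical client and filter):
+ #     async for schedule in client.list_job_schedules(filter="state eq 'active'"):
+ #         print(schedule.id)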
+ + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_task( + self, + job_id: str, + task: _azure_batch_models5.BatchTaskCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Task in the specified Job. + + The maximum lifetime of a Task from addition to completion is 180 days. If a + Task has not completed within 180 days of being added it will be terminated by + the Batch service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task is to be added. Required. + :type job_id: str + :param task: The Task to be created. Required. + :type task: ~azure.batch.models.BatchTaskCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None.
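+
+ A minimal sketch of adding a single Task; the client object, the Job ID, and
+ the command line are hypothetical::
+
+     from azure.batch.models import BatchTaskCreateContent
+
+     await client.create_task(
+         "my-job",
+         BatchTaskCreateContent(id="task-1", command_line="/bin/bash -c 'echo hello'"),
+     )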
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_tasks( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchTask"]: + """Lists all of the Tasks that are associated with the specified Job. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refers to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None.
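+
+ For example, a sketch that projects only the fields it needs; the client
+ object and the Job ID are hypothetical::
+
+     async for task in client.list_tasks("my-job", select=["id", "state"]):
+         print(task.id, task.state)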
+ :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + Tasks can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks + <https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks>`_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator-like instance of BatchTask + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchTask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchTask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_tasks_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchTask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async
def create_task_collection( + self, + job_id: str, + task_collection: _azure_batch_models5.BatchTaskGroup, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchTaskAddCollectionResult: + """Adds a collection of Tasks to the specified Job. + + Note that each Task must have a unique ID. The Batch service may not return the + results for each Task in the same order the Tasks were submitted in this + request. If the server times out or the connection is closed during the + request, the request may have been partially or fully processed, or not at all. + In such cases, the user should re-issue the request. Note that it is up to the + user to correctly handle failures when re-issuing a request. For example, you + should use the same Task IDs during a retry so that if the prior operation + succeeded, the retry will not create extra Tasks unexpectedly. If the response + contains any Tasks which failed to add, a client can retry the request. In a + retry, it is most efficient to resubmit only Tasks that failed to add, and to + omit Tasks that were successfully added on the first attempt. The maximum + lifetime of a Task from addition to completion is 180 days. If a Task has not + completed within 180 days of being added it will be terminated by the Batch + service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task collection is to be added. Required. + :type job_id: str + :param task_collection: The Tasks to be added. Required. + :type task_collection: ~azure.batch.models.BatchTaskGroup + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskAddCollectionResult. 
The BatchTaskAddCollectionResult is compatible with + MutableMapping + :rtype: ~azure.batch.models.BatchTaskAddCollectionResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models5.BatchTaskAddCollectionResult] = kwargs.pop("cls", None) + + _content = json.dumps(task_collection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_collection_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTaskAddCollectionResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Task from the specified Job. + + When a Task is deleted, all of the files in its directory on the Compute Node + where it ran are also deleted (regardless of the retention time). For + multi-instance Tasks, the delete Task operation applies synchronously to the + primary task; subtasks and their files are then deleted asynchronously in the + background. + + :param job_id: The ID of the Job from which to delete the Task. Required. 
+ :type job_id: str + :param task_id: The ID of the Task to delete. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: The ETag of the resource, used to check whether it has changed on the service. + Set to None to skip ETag checking. Default value is None. + :paramtype etag: str + :keyword match_condition: The match condition to apply to the ETag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None,
response_headers) # type: ignore + + @distributed_trace_async + async def get_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchTask: + """Gets information about the specified Task. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to get information about. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: The ETag of the resource, used to check whether it has changed on the service. + Set to None to skip ETag checking. Default value is None. + :paramtype etag: str + :keyword match_condition: The match condition to apply to the ETag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchTask.
The BatchTask is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchTask + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchTask] = kwargs.pop("cls", None) + + _request = build_batch_get_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTask, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def replace_task( + self, + job_id: str, + task_id: str, + task: _azure_batch_models5.BatchTask, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Task. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to update. Required. 
+ :type task_id: str + :param task: The Task to update. Required. + :type task: ~azure.batch.models.BatchTask + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: The ETag of the resource, used to check whether it has changed on the service. + Set to None to skip ETag checking. Default value is None. + :paramtype etag: str + :keyword match_condition: The match condition to apply to the ETag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_sub_tasks( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchSubtask"]: + """Lists all of the subtasks that are associated with the specified multi-instance + Task. + + If the Task is not a multi-instance Task then this returns an empty collection. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :param task_id: The ID of the Task. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None.
+ :paramtype select: list[str] + :return: An iterator like instance of BatchSubtask + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchSubtask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchSubtask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_sub_tasks_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchSubtask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def terminate_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Task. + + When the Task has been terminated, it moves to the completed state. For + multi-instance Tasks, the terminate Task operation applies synchronously to the + primary task; subtasks are then terminated asynchronously in the background. 
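+
+        .. admonition:: Example (illustrative only)
+
+            A minimal usage sketch, assuming an authenticated async ``BatchClient``
+            named ``client``; the Job and Task IDs are placeholders, and client
+            construction is not part of this diff::
+
+                await client.terminate_task("job-1", "task-1")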
+ + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to terminate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: The ETag of the resource, used to check whether it has changed on the service. + Set to None to skip ETag checking. Default value is None. + :paramtype etag: str + :keyword match_condition: The match condition to apply to the ETag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str",
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def reactivate_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Reactivates a Task, allowing it to run again even if its retry count has been + exhausted. + + Reactivation makes a Task eligible to be retried again up to its maximum retry + count. The Task's state is changed to active. As the Task is no longer in the + completed state, any previous exit code or failure information is no longer + available after reactivation. Each time a Task is reactivated, its retry count + is reset to 0. Reactivation will fail for Tasks that are not completed or that + previously completed successfully (with an exit code of 0). Additionally, it + will fail if the Job has completed (or is terminating or deleting). + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to reactivate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_reactivate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete_task_file( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified Task file from the Compute Node where the Task ran. + + Deletes the specified Task file from the Compute Node where the Task ran. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to delete. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to delete. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued.
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. Default value is None. + :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_file_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_task_file( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + """Returns the content of the specified Task file. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None.
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_task_file_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", 
response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_task_file_properties( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> bool: + """Gets the properties of the specified Task file. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_get_task_file_properties_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def list_task_files( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> AsyncItemPaged["_azure_batch_models5.BatchNodeFile"]: + """Lists the files in a Task's directory on its Compute Node. + + Lists the files in a Task's directory on its Compute Node. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose files you want to list. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.
Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + files can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files + `_. + Default value is None. + :paramtype filter: str + :keyword recursive: Whether to list children of the Task directory. This parameter can be used + in + combination with the filter parameter to list specific types of files. Default value is None. + :paramtype recursive: bool + :return: An iterator like instance of BatchNodeFile + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchNodeFile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNodeFile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_task_files_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeFile], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: +
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_node_user( + self, + pool_id: str, + node_id: str, + user: _azure_batch_models5.BatchNodeUserCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Adds a user Account to the specified Compute Node. + + You can add a user Account to a Compute Node only when it is in the idle or + running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to create a user Account. Required. + :type node_id: str + :param user: The options to use for creating the user. Required. + :type user: ~azure.batch.models.BatchNodeUserCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(user, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_node_user_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) +
response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete_node_user( + self, + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deletes a user Account from the specified Compute Node. + + You can delete a user Account to a Compute Node only when it is in the idle or + running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to delete a user Account. Required. + :type node_id: str + :param user_name: The name of the user Account to delete. Required. + :type user_name: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_user_request( + pool_id=pool_id, + node_id=node_id, + user_name=user_name, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_node_user( + self, + pool_id: str, + node_id: str, + user_name: str, + content: _azure_batch_models5.BatchNodeUserUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the password and expiration time of a user Account 
on the specified Compute Node. + + This operation replaces all of the updatable properties of the Account. For + example, if the expiryTime element is not specified, the current value is + replaced with the default value, not left unmodified. You can update a user + Account on a Compute Node only when it is in the idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the machine on which you want to update a user Account. Required. + :type node_id: str + :param user_name: The name of the user Account to update. Required. + :type user_name: str + :param content: The options to use for updating the user. Required. + :type content: ~azure.batch.models.BatchNodeUserUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_node_user_request( + pool_id=pool_id, + node_id=node_id, + user_name=user_name, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None,
response_headers) # type: ignore + + @distributed_trace_async + async def get_node( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchNode: + """Gets information about the specified Compute Node. + + Gets information about the specified Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to get information about. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchNode. The BatchNode is compatible with MutableMapping + :rtype: ~azure.batch.models.BatchNode + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchNode] = kwargs.pop("cls", None) + + _request = build_batch_get_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchNode, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return
deserialized # type: ignore + + @distributed_trace_async + async def reboot_node( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeRebootContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Restarts the specified Compute Node. + + You can restart a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for rebooting the Compute Node. Default value is None. + :type parameters: ~azure.batch.models.BatchNodeRebootContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_reboot_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + 
return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def start_node( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Starts the specified Compute Node. + + You can start a Compute Node only if it has been deallocated. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_start_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def reimage_node( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeReimageContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Reinstalls the operating system on the specified Compute Node. + + You can reinstall the operating system on a Compute Node only if it is in an + idle or running state. 
This API can be invoked only on Pools created with the + cloud service configuration property. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for reimaging the Compute Node. Default value is None. + :type parameters: ~azure.batch.models.BatchNodeReimageContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_reimage_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def deallocate_node( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeDeallocateContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + 
**kwargs: Any + ) -> None: + """Deallocates the specified Compute Node. + + You can deallocate a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for deallocating the Compute Node. Default value is None. + :type parameters: ~azure.batch.models.BatchNodeDeallocateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_deallocate_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_node_scheduling( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeDisableSchedulingContent] = None, + *, + 
timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Disables Task scheduling on the specified Compute Node. + + You can disable Task scheduling on a Compute Node only if its current + scheduling state is enabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to disable Task scheduling. + Required. + :type node_id: str + :param parameters: The options to use for disabling scheduling on the Compute Node. Default + value is None. + :type parameters: ~azure.batch.models.BatchNodeDisableSchedulingContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_disable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + 
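+    # Usage sketch (illustrative comment only, not generated code): given an
+    # authenticated async BatchClient named `client` and hypothetical pool/node
+    # IDs, the two scheduling operations pair up like so:
+    #
+    #     await client.disable_node_scheduling("mypool", "node-1")
+    #     await client.enable_node_scheduling("mypool", "node-1")
+    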
async def enable_node_scheduling(
+        self,
+        pool_id: str,
+        node_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> None:
+        """Enables Task scheduling on the specified Compute Node.
+
+        You can enable Task scheduling on a Compute Node only if its current scheduling
+        state is disabled.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node on which you want to enable Task scheduling.
+         Required.
+        :type node_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_enable_node_scheduling_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def get_node_remote_login_settings(
+        self,
+        pool_id: str,
+        node_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> _azure_batch_models5.BatchNodeRemoteLoginSettings:
+        """Gets the settings required for remote login to a Compute Node.
+
+        Before you can remotely log in to a Compute Node using the remote login settings,
+        you must create a user Account on the Compute Node.
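+
+        A minimal sketch (illustrative only; the ``client`` object and IDs are
+        hypothetical, and the attribute names assume the generated snake-case
+        mapping of ``remoteLoginIPAddress``/``remoteLoginPort``):
+
+        .. code-block:: python
+
+            settings = await client.get_node_remote_login_settings("mypool", "node-1")
+            target = f"{settings.remote_login_ip_address}:{settings.remote_login_port}"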
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node for which to obtain the remote login settings.
+         Required.
+        :type node_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: BatchNodeRemoteLoginSettings. The BatchNodeRemoteLoginSettings is compatible with
+         MutableMapping
+        :rtype: ~azure.batch.models.BatchNodeRemoteLoginSettings
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_azure_batch_models5.BatchNodeRemoteLoginSettings] = kwargs.pop("cls", None)
+
+        _request = build_batch_get_node_remote_login_settings_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_azure_batch_models5.BatchNodeRemoteLoginSettings, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace_async
+    async def upload_node_logs(
+        self,
+        pool_id: str,
+        node_id: str,
+        content: _azure_batch_models5.UploadBatchServiceLogsContent,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> _azure_batch_models5.UploadBatchServiceLogsResult:
+        """Upload Azure Batch service log files from the specified Compute Node to Azure
+        Blob Storage.
+
+        This is for gathering Azure Batch service log files in an automated fashion
+        from Compute Nodes if you are experiencing an error and wish to escalate to
+        Azure support. The Azure Batch service log files should be shared with Azure
+        support to aid in debugging issues with the Batch service.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node from which you want to upload the Azure Batch
+         service log files. Required.
+        :type node_id: str
+        :param content: The Azure Batch service log files upload options. Required.
+        :type content: ~azure.batch.models.UploadBatchServiceLogsContent
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :return: UploadBatchServiceLogsResult. The UploadBatchServiceLogsResult is compatible with
+         MutableMapping
+        :rtype: ~azure.batch.models.UploadBatchServiceLogsResult
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop(
+            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
+        )
+        cls: ClsType[_azure_batch_models5.UploadBatchServiceLogsResult] = kwargs.pop("cls", None)
+
+        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_batch_upload_node_logs_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_azure_batch_models5.UploadBatchServiceLogsResult, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace
+    def list_nodes(
+        self,
+        pool_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        select: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchNode"]:
+        """Lists the Compute Nodes in the specified Pool.
+
+        Lists the Compute Nodes in the specified Pool.
+
+        :param pool_id: The ID of the Pool from which you want to list Compute Nodes. Required.
+        :type pool_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         Compute Nodes can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool
+         `_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword select: An OData $select clause. Default value is None.
+ :paramtype select: list[str] + :return: An iterator like instance of BatchNode + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchNode] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNode]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNode], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_node_extension( + self, + pool_id: str, + node_id: str, + extension_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchNodeVMExtension: + """Gets information about the specified Compute Node Extension. + + Gets information about the specified Compute Node Extension. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that contains the extensions. Required. 
+        :type node_id: str
+        :param extension_name: The name of the Compute Node Extension that you want to get information
+         about. Required.
+        :type extension_name: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword select: An OData $select clause. Default value is None.
+        :paramtype select: list[str]
+        :return: BatchNodeVMExtension. The BatchNodeVMExtension is compatible with MutableMapping
+        :rtype: ~azure.batch.models.BatchNodeVMExtension
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_azure_batch_models5.BatchNodeVMExtension] = kwargs.pop("cls", None)
+
+        _request = build_batch_get_node_extension_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            extension_name=extension_name,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            select=select,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_azure_batch_models5.BatchNodeVMExtension, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace
+    def list_node_extensions(
+        self,
+        pool_id: str,
+        node_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        select: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchNodeVMExtension"]:
+        """Lists the Compute Node extensions in the specified Pool.
+
+        Lists the Compute Node extensions in the specified Pool.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node whose extensions you want to list. Required.
+        :type node_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         extensions can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword select: An OData $select clause. Default value is None.
+        :paramtype select: list[str]
+        :return: An iterator like instance of BatchNodeVMExtension
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchNodeVMExtension]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_azure_batch_models5.BatchNodeVMExtension]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_batch_list_node_extensions_request(
+                    pool_id=pool_id,
+                    node_id=node_id,
+                    timeout=timeout,
+                    ocpdate=ocpdate,
+                    max_results=max_results,
+                    select=select,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url(
+                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
+                    ),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeVMExtension], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace_async
+    async def delete_node_file(
+        self,
+        pool_id: str,
+        node_id: str,
+        file_path: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        recursive: Optional[bool] = None,
+        **kwargs: Any
+    ) -> None:
+        """Deletes the specified file from the Compute Node.
+
+        Deletes the specified file from the Compute Node.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node. Required.
+        :type node_id: str
+        :param file_path: The path to the file or directory. Required.
+        :type file_path: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword recursive: Whether to delete children of a directory. If the filePath parameter
+         represents a directory instead of a file, you can set recursive to true to delete the
+         directory and all of the files and subdirectories in it. If recursive is false
+         then the directory must be empty or deletion will fail. Default value is None.
+        :paramtype recursive: bool
+        :return: None
+        :rtype: None
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_delete_node_file_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            file_path=file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            recursive=recursive,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def get_node_file(
+        self,
+        pool_id: str,
+        node_id: str,
+        file_path: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        if_modified_since: Optional[datetime.datetime] = None,
+        if_unmodified_since: Optional[datetime.datetime] = None,
+        ocp_range: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterator[bytes]:
+        """Returns the content of the specified Compute Node file.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node. Required.
+        :type node_id: str
+        :param file_path: The path to the file or directory. Required.
+        :type file_path: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
+         to the client. The operation will be performed only if the resource on the service has
+         been modified since the specified time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service has
+         not been modified since the specified time. Default value is None.
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_node_file_properties( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: 
Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> bool:
+        """Gets the properties of the specified Compute Node file.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node. Required.
+        :type node_id: str
+        :param file_path: The path to the file or directory. Required.
+        :type file_path: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
+         to the client. The operation will be performed only if the resource on the service has
+         been modified since the specified time. Default value is None.
+        :paramtype if_modified_since: ~datetime.datetime
+        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
+         known to the client. The operation will be performed only if the resource on the service has
+         not been modified since the specified time. Default value is None.
+        :paramtype if_unmodified_since: ~datetime.datetime
+        :return: bool
+        :rtype: bool
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_batch_get_node_file_properties_request(
+            pool_id=pool_id,
+            node_id=node_id,
+            file_path=file_path,
+            timeout=timeout,
+            ocpdate=ocpdate,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
+        response_headers["ocp-batch-file-isdirectory"] = self._deserialize(
+            "bool", response.headers.get("ocp-batch-file-isdirectory")
+        )
+        response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode"))
+        response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url"))
+        response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time"))
+        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+        return 200 <= response.status_code <= 299
+
+    @distributed_trace
+    def list_node_files(
+        self,
+        pool_id: str,
+        node_id: str,
+        *,
+        timeout: Optional[int] = None,
+        ocpdate: Optional[datetime.datetime] = None,
+        max_results: Optional[int] = None,
+        filter: Optional[str] = None,
+        recursive: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_azure_batch_models5.BatchNodeFile"]:
+        """Lists all of the files in Task directories on the specified Compute Node.
+
+        Lists all of the files in Task directories on the specified Compute Node.
+
+        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
+        :type pool_id: str
+        :param node_id: The ID of the Compute Node whose files you want to list. Required.
+        :type node_id: str
+        :keyword timeout: The maximum time that the server can spend processing the request, in
+         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
+         instead. Default value is None.
+        :paramtype timeout: int
+        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
+         current system clock time; set it explicitly if you are calling the REST API
+         directly. Default value is None.
+        :paramtype ocpdate: ~datetime.datetime
+        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
+         files can be returned. Default value is None.
+        :paramtype max_results: int
+        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
+         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files
+         `_.
+         Default value is None.
+        :paramtype filter: str
+        :keyword recursive: Whether to list children of a directory. Default value is None.
+ :paramtype recursive: bool + :return: An iterator like instance of BatchNodeFile + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.batch.models.BatchNodeFile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNodeFile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_files_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeFile], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/batch/azure-batch/client/aio/_operations/_patch.py b/sdk/batch/azure-batch/client/aio/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. 
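+
+As a purely illustrative sketch (``answer`` is a made-up name, not part of this
+package), anything defined here becomes public API once it is listed in
+``__all__``:
+
+.. code-block:: python
+
+    # hypothetical customization; the name is an example only
+    def answer() -> int:
+        return 42
+
+    __all__ = ["answer"]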
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/aio/_patch.py b/sdk/batch/azure-batch/client/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/py.typed b/sdk/batch/azure-batch/client/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/batch/azure-batch/client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/batch/azure-batch/setup.py b/sdk/batch/azure-batch/setup.py index c8218eb0bece..cc976a355867 100644 --- a/sdk/batch/azure-batch/setup.py +++ b/sdk/batch/azure-batch/setup.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -# coding: utf-8 + import os import re @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-batch" PACKAGE_PPRINT_NAME = "Azure Batch" +PACKAGE_NAMESPACE = "client" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -29,7 +30,7 @@ setup( name=PACKAGE_NAME, version=version, - description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", license="MIT License", @@ -42,7 +43,6 @@ "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -53,18 +53,16 @@ packages=find_packages( exclude=[ "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", ] ), include_package_data=True, package_data={ - "azure.batch": ["py.typed"], + "client": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], - python_requires=">=3.8", + python_requires=">=3.9", ) diff --git a/sdk/batch/azure-batch/tests/test_batch.py b/sdk/batch/azure-batch/tests/test_batch.py index 51e33cb2465b..cf98cfbd6c49 100644 --- a/sdk/batch/azure-batch/tests/test_batch.py +++ b/sdk/batch/azure-batch/tests/test_batch.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- @@ -85,7 +85,7 @@ async def assertCreateTasksError(self, code, func, *args, **kwargs): pytest.fail("Inner BatchErrorException expected but not exist") except Exception as err: pytest.fail("Expected CreateTasksError, instead got: {!r}".format(err)) - + @CachedResourceGroupPreparer(location=AZURE_LOCATION) @AccountPreparer(location=AZURE_LOCATION, batch_environment=BATCH_ENVIRONMENT) @pytest.mark.parametrize("BatchClient", [SyncBatchClient, AsyncBatchClient], ids=["sync", "async"]) diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_client.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_client.py index a452f0f59198..b8f8ac53b642 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_client.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_client.py @@ -16,14 +16,14 @@ from azure.core.rest import HttpRequest, HttpResponse from ._configuration import RadiologyInsightsClientConfiguration -from ._operations import RadiologyInsightsClientOperationsMixin +from ._operations._operations import _RadiologyInsightsClientOperationsMixin from ._utils.serialization import Deserializer, Serializer if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class RadiologyInsightsClient(RadiologyInsightsClientOperationsMixin): +class RadiologyInsightsClient(_RadiologyInsightsClientOperationsMixin): 
"""RadiologyInsightsClient. :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/__init__.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/__init__.py index ba57f3464683..933fcd7d1b55 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/__init__.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import RadiologyInsightsClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "RadiologyInsightsClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/_operations.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/_operations.py index 38fe5a005428..b1dd7a2a5cc0 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/_operations.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_operations/_operations.py @@ -73,7 +73,7 @@ def build_radiology_insights_infer_radiology_insights_request( # pylint: disabl return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -class RadiologyInsightsClientOperationsMixin( +class _RadiologyInsightsClientOperationsMixin( ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], RadiologyInsightsClientConfiguration] ): diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_utils/model_base.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_utils/model_base.py index aaa6692b2346..49d5c7259389 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_utils/model_base.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/_utils/model_base.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_client.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_client.py index bf856719b5c9..253d78482ad9 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_client.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_client.py @@ -17,13 +17,13 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import RadiologyInsightsClientConfiguration -from ._operations import RadiologyInsightsClientOperationsMixin +from ._operations._operations import _RadiologyInsightsClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class RadiologyInsightsClient(RadiologyInsightsClientOperationsMixin): +class RadiologyInsightsClient(_RadiologyInsightsClientOperationsMixin): """RadiologyInsightsClient. :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/__init__.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/__init__.py index ba57f3464683..933fcd7d1b55 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/__init__.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import RadiologyInsightsClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "RadiologyInsightsClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/_operations.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/_operations.py index 8457b5fdc4f6..85e8230e5a3f 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/_operations.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/azure/healthinsights/radiologyinsights/aio/_operations/_operations.py @@ -40,7 +40,7 @@ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RadiologyInsightsClientOperationsMixin( +class _RadiologyInsightsClientOperationsMixin( ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], RadiologyInsightsClientConfiguration] ): diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/conftest.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/conftest.py new file mode 100644 index 000000000000..9e422a805079 --- /dev/null +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/conftest.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    radiologyinsights_subscription_id = os.environ.get(
+        "RADIOLOGYINSIGHTS_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    radiologyinsights_tenant_id = os.environ.get("RADIOLOGYINSIGHTS_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    radiologyinsights_client_id = os.environ.get("RADIOLOGYINSIGHTS_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    radiologyinsights_client_secret = os.environ.get(
+        "RADIOLOGYINSIGHTS_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=radiologyinsights_subscription_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=radiologyinsights_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=radiologyinsights_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=radiologyinsights_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights.py
new file mode 100644
index 000000000000..c3eb1e692af3
--- /dev/null
+++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights.py
@@ -0,0 +1,474 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import RadiologyInsightsClientTestBase, RadiologyInsightsPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRadiologyInsights(RadiologyInsightsClientTestBase): + @RadiologyInsightsPreparer() + @recorded_by_proxy + def test_begin_infer_radiology_insights(self, radiologyinsights_endpoint): + client = self.create_client(endpoint=radiologyinsights_endpoint) + response = client.begin_infer_radiology_insights( + id="str", + resource={ + "id": "str", + "status": "str", + "createdAt": "2020-02-20 00:00:00", + "error": ~azure.core.ODataV4Format, + "expiresAt": "2020-02-20 00:00:00", + "jobData": { + "patients": [ + { + "id": "str", + "details": { + "birthDate": "2020-02-20", + "clinicalInfo": [ + { + "resourceType": "str", + "id": "str", + "implicitRules": "str", + "language": "str", + "meta": { + "lastUpdated": "str", + "profile": ["str"], + "security": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": {"coding": [...], "text": "str"}, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": {"coding": [...], "text": "str"}, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "source": "str", + "tag": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": {"coding": [...], "text": "str"}, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { 
+ "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": {"coding": [...], "text": "str"}, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "versionId": "str", + }, + } + ], + "sex": "str", + }, + "encounters": [ + { + "id": "str", + "class": "str", + "period": {"end": "2020-02-20 00:00:00", "start": "2020-02-20 00:00:00"}, + } + ], + "patientDocuments": [ + { + "content": {"sourceType": "str", "value": "str"}, + "id": "str", + "type": "str", + "administrativeMetadata": { + "encounterId": "str", + "orderedProcedures": [ + { + "code": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": ..., + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": ..., + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "description": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [...], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": 
{"end": "str", "start": "str"}, + "system": "str", + "type": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [...], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + } + ], + }, + "authors": [{"fullName": "str", "id": "str"}], + "clinicalType": "str", + "createdAt": "2020-02-20 00:00:00", + "language": "str", + "specialtyType": "str", + } + ], + } + ], + "configuration": { + "includeEvidence": bool, + "inferenceOptions": { + "findingOptions": {"provideFocusedSentenceEvidence": bool}, + "followupRecommendationOptions": { + "includeRecommendationsInReferences": bool, + "includeRecommendationsWithNoSpecifiedModality": bool, + "provideFocusedSentenceEvidence": bool, + }, + "guidanceOptions": {"showGuidanceInHistory": bool}, + "qualityMeasureOptions": {"measureTypes": ["str"]}, + }, + "inferenceTypes": ["str"], + "locale": "str", + "verbose": bool, + }, + }, + "result": { + "modelVersion": "str", + "patientResults": [{"inferences": ["radiology_insights_inference"], "patientId": "str"}], + }, + "updatedAt": "2020-02-20 00:00:00", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights_async.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights_async.py new file mode 100644 index 000000000000..432850edbc14 --- /dev/null +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/test_radiology_insights_async.py @@ -0,0 +1,486 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import RadiologyInsightsPreparer +from testpreparer_async import RadiologyInsightsClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRadiologyInsightsAsync(RadiologyInsightsClientTestBaseAsync): + @RadiologyInsightsPreparer() + @recorded_by_proxy_async + async def test_begin_infer_radiology_insights(self, radiologyinsights_endpoint): + client = self.create_async_client(endpoint=radiologyinsights_endpoint) + response = await ( + await client.begin_infer_radiology_insights( + id="str", + resource={ + "id": "str", + "status": "str", + "createdAt": "2020-02-20 00:00:00", + "error": ~azure.core.ODataV4Format, + "expiresAt": "2020-02-20 00:00:00", + "jobData": { + "patients": [ + { + "id": "str", + "details": { + "birthDate": "2020-02-20", + "clinicalInfo": [ + { + "resourceType": "str", + "id": "str", + "implicitRules": "str", + "language": "str", + "meta": { + "lastUpdated": "str", + "profile": ["str"], + "security": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": { + "coding": [...], + "text": "str", + }, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": {"coding": [...], "text": "str"}, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "source": "str", + "tag": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": { + "coding": [...], + "text": "str", + }, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": 
"str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": {"coding": [...], "text": "str"}, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "versionId": "str", + }, + } + ], + "sex": "str", + }, + "encounters": [ + { + "id": "str", + "class": "str", + "period": {"end": "2020-02-20 00:00:00", "start": "2020-02-20 00:00:00"}, + } + ], + "patientDocuments": [ + { + "content": {"sourceType": "str", "value": "str"}, + "id": "str", + "type": "str", + "administrativeMetadata": { + "encounterId": "str", + "orderedProcedures": [ + { + "code": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": ..., + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": { + "end": "str", + "start": "str", + }, + "system": "str", + "type": ..., + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "description": "str", + "extension": [ + { + "url": "str", + "valueBoolean": bool, + "valueCodeableConcept": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [...], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "valueDateTime": "str", + "valueInteger": 0, + "valuePeriod": {"end": "str", "start": "str"}, + "valueQuantity": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "valueRange": { + "high": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "low": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + }, + "valueRatio": { + "denominator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "numerator": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", 
+ "value": 0.0, + }, + }, + "valueReference": { + "display": "str", + "identifier": { + "assigner": ..., + "period": {"end": "str", "start": "str"}, + "system": "str", + "type": { + "coding": [ + { + "code": "str", + "display": "str", + "extension": [...], + "id": "str", + "system": "str", + "version": "str", + } + ], + "text": "str", + }, + "use": "str", + "value": "str", + }, + "reference": "str", + "type": "str", + }, + "valueSampledData": { + "dimensions": 0, + "origin": { + "code": "str", + "comparator": "str", + "system": "str", + "unit": "str", + "value": 0.0, + }, + "period": 0.0, + "data": "str", + "factor": 0.0, + "lowerLimit": 0.0, + "upperLimit": 0.0, + }, + "valueString": "str", + "valueTime": "12:30:00", + } + ], + } + ], + }, + "authors": [{"fullName": "str", "id": "str"}], + "clinicalType": "str", + "createdAt": "2020-02-20 00:00:00", + "language": "str", + "specialtyType": "str", + } + ], + } + ], + "configuration": { + "includeEvidence": bool, + "inferenceOptions": { + "findingOptions": {"provideFocusedSentenceEvidence": bool}, + "followupRecommendationOptions": { + "includeRecommendationsInReferences": bool, + "includeRecommendationsWithNoSpecifiedModality": bool, + "provideFocusedSentenceEvidence": bool, + }, + "guidanceOptions": {"showGuidanceInHistory": bool}, + "qualityMeasureOptions": {"measureTypes": ["str"]}, + }, + "inferenceTypes": ["str"], + "locale": "str", + "verbose": bool, + }, + }, + "result": { + "modelVersion": "str", + "patientResults": [{"inferences": ["radiology_insights_inference"], "patientId": "str"}], + }, + "updatedAt": "2020-02-20 00:00:00", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer.py new file mode 100644 index 000000000000..ec55a05d610b --- /dev/null +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer.py @@ -0,0 +1,26 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from azure.healthinsights.radiologyinsights import RadiologyInsightsClient +from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer +import functools + + +class RadiologyInsightsClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(RadiologyInsightsClient) + return self.create_client_from_credential( + RadiologyInsightsClient, + credential=credential, + endpoint=endpoint, + ) + + +RadiologyInsightsPreparer = functools.partial( + PowerShellPreparer, "radiologyinsights", radiologyinsights_endpoint="https://fake_radiologyinsights_endpoint.com" +) diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer_async.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer_async.py new file mode 100644 index 000000000000..d95a566f99fa --- /dev/null +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/generated_tests/testpreparer_async.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.healthinsights.radiologyinsights.aio import RadiologyInsightsClient +from devtools_testutils import AzureRecordedTestCase + + +class RadiologyInsightsClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(RadiologyInsightsClient, is_async=True) + return self.create_client_from_credential( + RadiologyInsightsClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/healthinsights/azure-healthinsights-radiologyinsights/setup.py b/sdk/healthinsights/azure-healthinsights-radiologyinsights/setup.py index 7776e6ff7ab2..5a3dc3f30e25 100644 --- a/sdk/healthinsights/azure-healthinsights-radiologyinsights/setup.py +++ b/sdk/healthinsights/azure-healthinsights-radiologyinsights/setup.py @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-healthinsights-radiologyinsights" PACKAGE_PPRINT_NAME = "Azure Health Insights - Radiology Insights" +PACKAGE_NAMESPACE = "azure.healthinsights.radiologyinsights" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: diff --git a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in index 903a5953d673..0cc8058bae0b 100644 --- a/sdk/keyvault/azure-keyvault-administration/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-administration/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/administration/py.typed +include azure/keyvault/administration/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/administration/__init__.py diff --git a/sdk/keyvault/azure-keyvault-administration/_metadata.json 
b/sdk/keyvault/azure-keyvault-administration/_metadata.json new file mode 100644 index 000000000000..06284fddac1b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-administration/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "7.6" +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-administration/apiview-properties.json b/sdk/keyvault/azure-keyvault-administration/apiview-properties.json new file mode 100644 index 000000000000..320d39e0f4fe --- /dev/null +++ b/sdk/keyvault/azure-keyvault-administration/apiview-properties.json @@ -0,0 +1,70 @@ +{ + "CrossLanguagePackageId": "KeyVault", + "CrossLanguageDefinitionId": { + "azure.keyvault.administration._generated.models.FullBackupOperation": "KeyVault.FullBackupOperation", + "azure.keyvault.administration._generated.models.FullBackupOperationError": "KeyVault.FullBackupOperation.error.anonymous", + "azure.keyvault.administration._generated.models.KeyVaultError": "KeyVaultError", + "azure.keyvault.administration._generated.models.Permission": "KeyVault.Permission", + "azure.keyvault.administration._generated.models.PreBackupOperationParameters": "KeyVault.PreBackupOperationParameters", + "azure.keyvault.administration._generated.models.PreRestoreOperationParameters": "KeyVault.PreRestoreOperationParameters", + "azure.keyvault.administration._generated.models.RestoreOperation": "KeyVault.RestoreOperation", + "azure.keyvault.administration._generated.models.RestoreOperationParameters": "KeyVault.RestoreOperationParameters", + "azure.keyvault.administration._generated.models.RoleAssignment": "KeyVault.RoleAssignment", + "azure.keyvault.administration._generated.models.RoleAssignmentCreateParameters": "KeyVault.RoleAssignmentCreateParameters", + "azure.keyvault.administration._generated.models.RoleAssignmentProperties": "KeyVault.RoleAssignmentProperties", + "azure.keyvault.administration._generated.models.RoleAssignmentPropertiesWithScope": "KeyVault.RoleAssignmentPropertiesWithScope", + "azure.keyvault.administration._generated.models.RoleDefinition": "KeyVault.RoleDefinition", + "azure.keyvault.administration._generated.models.RoleDefinitionCreateParameters": "KeyVault.RoleDefinitionCreateParameters", + "azure.keyvault.administration._generated.models.RoleDefinitionProperties": "KeyVault.RoleDefinitionProperties", + "azure.keyvault.administration._generated.models.SASTokenParameter": "KeyVault.SASTokenParameter", + "azure.keyvault.administration._generated.models.SelectiveKeyRestoreOperation": "KeyVault.SelectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.models.SelectiveKeyRestoreOperationParameters": "KeyVault.SelectiveKeyRestoreOperationParameters", + "azure.keyvault.administration._generated.models.Setting": "KeyVault.Setting", + "azure.keyvault.administration._generated.models.SettingsListResult": "KeyVault.SettingsListResult", + "azure.keyvault.administration._generated.models.UpdateSettingRequest": "KeyVault.UpdateSettingRequest", + "azure.keyvault.administration._generated.models.RoleDefinitionType": "KeyVault.RoleDefinitionType", + "azure.keyvault.administration._generated.models.RoleType": "KeyVault.RoleType", + "azure.keyvault.administration._generated.models.DataAction": "KeyVault.DataAction", + "azure.keyvault.administration._generated.models.RoleScope": "KeyVault.RoleScope", + "azure.keyvault.administration._generated.models.OperationStatus": "KeyVault.OperationStatus", + "azure.keyvault.administration._generated.models.SettingTypeEnum": "KeyVault.SettingTypeEnum", + 
"azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.delete": "KeyVault.RoleDefinitions.delete", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.delete": "KeyVault.RoleDefinitions.delete", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.create_or_update": "KeyVault.RoleDefinitions.createOrUpdate", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.create_or_update": "KeyVault.RoleDefinitions.createOrUpdate", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.get": "KeyVault.RoleDefinitions.get", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.get": "KeyVault.RoleDefinitions.get", + "azure.keyvault.administration._generated.operations.RoleDefinitionsOperations.list": "KeyVault.RoleDefinitions.list", + "azure.keyvault.administration._generated.aio.operations.RoleDefinitionsOperations.list": "KeyVault.RoleDefinitions.list", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.delete": "KeyVault.RoleAssignments.delete", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.delete": "KeyVault.RoleAssignments.delete", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.create": "KeyVault.RoleAssignments.create", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.create": "KeyVault.RoleAssignments.create", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.get": "KeyVault.RoleAssignments.get", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.get": "KeyVault.RoleAssignments.get", + "azure.keyvault.administration._generated.operations.RoleAssignmentsOperations.list_for_scope": "KeyVault.RoleAssignments.listForScope", + "azure.keyvault.administration._generated.aio.operations.RoleAssignmentsOperations.list_for_scope": "KeyVault.RoleAssignments.listForScope", + "azure.keyvault.administration._generated.KeyVaultClient.full_backup_status": "KeyVault.fullBackupStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.full_backup_status": "KeyVault.fullBackupStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_full_backup": "KeyVault.fullBackup", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_full_backup": "KeyVault.fullBackup", + "azure.keyvault.administration._generated.KeyVaultClient.begin_pre_full_backup": "KeyVault.preFullBackup", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_pre_full_backup": "KeyVault.preFullBackup", + "azure.keyvault.administration._generated.KeyVaultClient.restore_status": "KeyVault.restoreStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.restore_status": "KeyVault.restoreStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_full_restore_operation": "KeyVault.fullRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_full_restore_operation": "KeyVault.fullRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.begin_pre_full_restore_operation": "KeyVault.preFullRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_pre_full_restore_operation": "KeyVault.preFullRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.selective_key_restore_status": 
"KeyVault.selectiveKeyRestoreStatus", + "azure.keyvault.administration._generated.aio.KeyVaultClient.selective_key_restore_status": "KeyVault.selectiveKeyRestoreStatus", + "azure.keyvault.administration._generated.KeyVaultClient.begin_selective_key_restore_operation": "KeyVault.selectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.aio.KeyVaultClient.begin_selective_key_restore_operation": "KeyVault.selectiveKeyRestoreOperation", + "azure.keyvault.administration._generated.KeyVaultClient.update_setting": "KeyVault.updateSetting", + "azure.keyvault.administration._generated.aio.KeyVaultClient.update_setting": "KeyVault.updateSetting", + "azure.keyvault.administration._generated.KeyVaultClient.get_setting": "KeyVault.getSetting", + "azure.keyvault.administration._generated.aio.KeyVaultClient.get_setting": "KeyVault.getSetting", + "azure.keyvault.administration._generated.KeyVaultClient.get_settings": "KeyVault.getSettings", + "azure.keyvault.administration._generated.aio.KeyVaultClient.get_settings": "KeyVault.getSettings" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-administration/azure/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/__init__.py index 5e6b3233b362..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/__init__.py @@ -1,37 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ -from ._access_control_client import KeyVaultAccessControlClient -from ._backup_client import KeyVaultBackupClient -from ._enums import KeyVaultRoleScope, KeyVaultDataAction, KeyVaultSettingType -from ._internal.client_base import ApiVersion -from ._models import ( - KeyVaultBackupResult, - KeyVaultPermission, - KeyVaultRoleAssignment, - KeyVaultRoleAssignmentProperties, - KeyVaultRoleDefinition, - KeyVaultSetting, -) -from ._settings_client import KeyVaultSettingsClient - - -__all__ = [ - "ApiVersion", - "KeyVaultBackupResult", - "KeyVaultAccessControlClient", - "KeyVaultBackupClient", - "KeyVaultDataAction", - "KeyVaultPermission", - "KeyVaultRoleAssignment", - "KeyVaultRoleAssignmentProperties", - "KeyVaultRoleDefinition", - "KeyVaultRoleScope", - "KeyVaultSetting", - "KeyVaultSettingsClient", - "KeyVaultSettingType", -] - -from ._version import VERSION -__version__ = VERSION +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py index f41733a1a07a..390fcaf0c4ad 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_access_control_client.py @@ -67,15 +67,10 @@ def create_role_assignment( assignment_name = name or uuid4() create_parameters = RoleAssignmentCreateParameters( - properties=RoleAssignmentProperties( - principal_id=principal_id, role_definition_id=str(definition_id) - ) + properties=RoleAssignmentProperties(principal_id=principal_id, role_definition_id=str(definition_id)) ) assignment = self._client.role_assignments.create( - scope=scope, - role_assignment_name=str(assignment_name), - parameters=create_parameters, - **kwargs + scope=scope, role_assignment_name=str(assignment_name), parameters=create_parameters, **kwargs ) return KeyVaultRoleAssignment._from_generated(assignment) @@ -95,9 +90,7 @@ def delete_role_assignment( :rtype: None """ try: - self._client.role_assignments.delete( - scope=scope, role_assignment_name=str(name), **kwargs - ) + self._client.role_assignments.delete(scope=scope, role_assignment_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -116,9 +109,7 @@ def get_role_assignment( :returns: The fetched role assignment. 
:rtype: ~azure.keyvault.administration.KeyVaultRoleAssignment """ - assignment = self._client.role_assignments.get( - scope=scope, role_assignment_name=str(name), **kwargs - ) + assignment = self._client.role_assignments.get(scope=scope, role_assignment_name=str(name), **kwargs) return KeyVaultRoleAssignment._from_generated(assignment) @distributed_trace @@ -135,9 +126,7 @@ def list_role_assignments( :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.administration.KeyVaultRoleAssignment] """ return self._client.role_assignments.list_for_scope( - scope=scope, - cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], **kwargs ) @distributed_trace @@ -198,10 +187,7 @@ def set_role_definition( parameters = RoleDefinitionCreateParameters(properties=properties) definition = self._client.role_definitions.create_or_update( - scope=scope, - role_definition_name=str(name or uuid4()), - parameters=parameters, - **kwargs + scope=scope, role_definition_name=str(name or uuid4()), parameters=parameters, **kwargs ) return KeyVaultRoleDefinition._from_generated(definition) @@ -220,9 +206,7 @@ def get_role_definition( :returns: The fetched role definition. :rtype: ~azure.keyvault.administration.KeyVaultRoleDefinition """ - definition = self._client.role_definitions.get( - scope=scope, role_definition_name=str(name), **kwargs - ) + definition = self._client.role_definitions.get(scope=scope, role_definition_name=str(name), **kwargs) return KeyVaultRoleDefinition._from_generated(definition) @distributed_trace @@ -241,9 +225,7 @@ def delete_role_definition( :rtype: None """ try: - self._client.role_definitions.delete( - scope=scope, role_definition_name=str(name), **kwargs - ) + self._client.role_definitions.delete(scope=scope, role_definition_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -261,9 +243,7 @@ def list_role_definitions( :rtype: ~azure.core.paging.ItemPaged[~azure.keyvault.administration.KeyVaultRoleDefinition] """ return self._client.role_definitions.list( - scope=scope, - cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], **kwargs ) def __enter__(self) -> "KeyVaultAccessControlClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py index 714907cdf182..495ef624b061 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_backup_client.py @@ -51,9 +51,7 @@ def _use_continuation_token(self, continuation_token: str, status_method: Callab + "operation poller's continuation_token() method" ) from ex - pipeline_response = status_method( - job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response - ) + pipeline_response = status_method(job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response) if "azure-asyncoperation" not in pipeline_response.http_response.headers: pipeline_response.http_response.headers["azure-asyncoperation"] = status_url return base64.b64encode(pickle.dumps(pipeline_response)).decode("ascii") @@ -66,8 +64,7 @@ def begin_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, 
**kwargs: Any, - ) -> LROPoller[KeyVaultBackupResult]: - ... + ) -> LROPoller[KeyVaultBackupResult]: ... @overload def begin_backup( @@ -77,8 +74,7 @@ def begin_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[KeyVaultBackupResult]: - ... + ) -> LROPoller[KeyVaultBackupResult]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace @@ -145,8 +141,7 @@ def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @overload def begin_restore( @@ -157,8 +152,7 @@ def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace @@ -250,8 +244,7 @@ def begin_pre_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @overload def begin_pre_backup( @@ -261,8 +254,7 @@ def begin_pre_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @distributed_trace def begin_pre_backup( # pylint: disable=docstring-keyword-should-match-keyword-only @@ -318,8 +310,7 @@ def begin_pre_restore( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @overload def begin_pre_restore( @@ -329,8 +320,7 @@ def begin_pre_restore( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> LROPoller[None]: - ... + ) -> LROPoller[None]: ... @distributed_trace def begin_pre_restore( # pylint: disable=docstring-keyword-should-match-keyword-only diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py index bc155465c5f3..2aab678d87ca 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_client.py @@ -16,13 +16,14 @@ from ._configuration import KeyVaultClientConfiguration from ._utils.serialization import Deserializer, Serializer -from .operations import KeyVaultClientOperationsMixin, RoleAssignmentsOperations, RoleDefinitionsOperations +from .operations import RoleAssignmentsOperations, RoleDefinitionsOperations +from .operations._operations import _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. 
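The `_backup_client.py` hunks above only reformat the overload stubs onto one line (`) -> LROPoller[...]: ...`); the overload pair itself is unchanged. Each long-running operation accepts exactly one authentication style per call: either `use_managed_identity=True` or an explicit `sas_token`, never both. A hedged usage sketch for `begin_backup` follows; the vault and storage URLs are placeholders, and `azure-identity` is assumed to be installed.

```python
# Usage sketch for the begin_backup overload pair shown above.
# URLs are placeholders; pick exactly one of the two call styles.
from azure.identity import DefaultAzureCredential
from azure.keyvault.administration import KeyVaultBackupClient

client = KeyVaultBackupClient(
    "https://<managed-hsm-name>.managedhsm.azure.net", DefaultAzureCredential()
)

# Overload 1: the Managed HSM reaches the storage container with its own
# managed identity, so no SAS token is supplied.
poller = client.begin_backup(
    "https://<account>.blob.core.windows.net/<container>", use_managed_identity=True
)

# Overload 2 (alternative): hand over an explicit SAS token instead.
# poller = client.begin_backup(
#     "https://<account>.blob.core.windows.net/<container>", sas_token="<sas-token>"
# )

result = poller.result()  # KeyVaultBackupResult; result.folder_url locates the backup
```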
diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py
index 752b2822f9d3..f5af3a4eb8a2 100644
--- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py
+++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/_validation.py
@@ -10,6 +10,22 @@ def api_version_validation(**kwargs):
     params_added_on = kwargs.pop("params_added_on", {})
     method_added_on = kwargs.pop("method_added_on", "")
+    api_versions_list = kwargs.pop("api_versions_list", [])
+
+    def _index_with_default(value: str, default: int = -1) -> int:
+        """Get the index of value in api_versions_list, or return default if not found.
+
+        :param value: The value to search for in the api_versions_list.
+        :type value: str
+        :param default: The default value to return if the value is not found.
+        :type default: int
+        :return: The index of the value in the list, or the default value if not found.
+        :rtype: int
+        """
+        try:
+            return api_versions_list.index(value)
+        except ValueError:
+            return default

     def decorator(func):
         @functools.wraps(func)
@@ -21,7 +37,7 @@ def wrapper(*args, **kwargs):
             except AttributeError:
                 return func(*args, **kwargs)

-            if method_added_on > client_api_version:
+            if _index_with_default(method_added_on) > _index_with_default(client_api_version):
                 raise ValueError(
                     f"'{func.__name__}' is not available in API version "
                     f"{client_api_version}. Pass service API version {method_added_on} or newer to your client."
@@ -31,7 +47,7 @@ def wrapper(*args, **kwargs):
                 parameter: api_version
                 for api_version, parameters in params_added_on.items()
                 for parameter in parameters
-                if parameter in kwargs and api_version > client_api_version
+                if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version)
             }
             if unsupported:
                 raise ValueError(
diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py
index f6eec1768ab3..998652c3ba7a 100644
--- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py
+++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/_client.py
@@ -16,13 +16,14 @@
 from .._utils.serialization import Deserializer, Serializer
 from ._configuration import KeyVaultClientConfiguration
-from .operations import KeyVaultClientOperationsMixin, RoleAssignmentsOperations, RoleDefinitionsOperations
+from .operations import RoleAssignmentsOperations, RoleDefinitionsOperations
+from .operations._operations import _KeyVaultClientOperationsMixin

 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential


-class KeyVaultClient(KeyVaultClientOperationsMixin):
+class KeyVaultClient(_KeyVaultClientOperationsMixin):
     """The key vault client performs cryptographic key operations and vault operations against the
     Key Vault service.
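The `_validation.py` change above is the substantive one in this group. `method_added_on` and the `params_added_on` keys used to be compared against the client's API version as plain strings, which misorders preview labels: "7.6-preview.2" sorts after "7.6" lexicographically even though it shipped earlier. Comparing positions in the ordered `api_versions_list` fixes that. A self-contained illustration:

```python
# Why the ordered api_versions_list matters: string comparison misranks
# preview API versions relative to their GA release.

api_versions_list = ["7.6-preview.2", "7.6"]  # oldest -> newest, as served


def _index_with_default(value: str, default: int = -1) -> int:
    # Position of an API version in the ordered list; -1 if unknown.
    try:
        return api_versions_list.index(value)
    except ValueError:
        return default


method_added_on = "7.6-preview.2"
client_api_version = "7.6"

# Lexicographic comparison gets this wrong: "7.6-preview.2" > "7.6" is True,
# so the old guard would reject a method the GA client actually supports.
assert (method_added_on > client_api_version) is True

# Positional comparison gets it right: the preview version precedes GA,
# so the method is available and no ValueError is raised.
assert _index_with_default(method_added_on) <= _index_with_default(client_api_version)
```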
diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py index 2318933b2c83..d112b365af8c 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/__init__.py @@ -14,7 +14,6 @@ from ._operations import RoleDefinitionsOperations # type: ignore from ._operations import RoleAssignmentsOperations # type: ignore -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -23,7 +22,6 @@ __all__ = [ "RoleDefinitionsOperations", "RoleAssignmentsOperations", - "KeyVaultClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py index 814c54aa59b5..d2564c4d7db4 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/aio/operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core import AsyncPipelineClient @@ -392,7 +392,7 @@ async def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _mo @distributed_trace def list( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.RoleDefinition"]: + ) -> AsyncItemPaged["_models.RoleDefinition"]: """Get all role definitions that are applicable at scope and above. :param scope: The scope of the role definition. Required. @@ -805,7 +805,7 @@ async def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _mo @distributed_trace def list_for_scope( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.RoleAssignment"]: + ) -> AsyncItemPaged["_models.RoleAssignment"]: """Gets role assignments for a scope. :param scope: The scope of the role assignments. Required. 
@@ -897,7 +897,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration] ): @@ -1174,8 +1174,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) async def _pre_full_backup_initial( self, @@ -1310,8 +1311,9 @@ async def begin_pre_full_backup( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) async def begin_pre_full_backup( self, @@ -1665,8 +1667,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) async def _pre_full_restore_operation_initial( self, @@ -1801,8 +1804,9 @@ async def begin_pre_full_restore_operation( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) async def begin_pre_full_restore_operation( self, diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py index 2318933b2c83..d112b365af8c 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/__init__.py @@ -14,7 +14,6 @@ from ._operations import RoleDefinitionsOperations # type: ignore from ._operations import RoleAssignmentsOperations # type: ignore -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -23,7 +22,6 @@ __all__ = [ "RoleDefinitionsOperations", "RoleAssignmentsOperations", - "KeyVaultClientOperationsMixin", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py index 8c5d159ae978..1b0d6f6e19e1 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_generated/operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, 
Iterable, Iterator, List, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, List, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core import PipelineClient @@ -846,7 +846,7 @@ def get(self, scope: str, role_definition_name: str, **kwargs: Any) -> _models.R return deserialized # type: ignore @distributed_trace - def list(self, scope: str, *, filter: Optional[str] = None, **kwargs: Any) -> Iterable["_models.RoleDefinition"]: + def list(self, scope: str, *, filter: Optional[str] = None, **kwargs: Any) -> ItemPaged["_models.RoleDefinition"]: """Get all role definitions that are applicable at scope and above. :param scope: The scope of the role definition. Required. @@ -1259,7 +1259,7 @@ def get(self, scope: str, role_assignment_name: str, **kwargs: Any) -> _models.R @distributed_trace def list_for_scope( self, scope: str, *, filter: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.RoleAssignment"]: + ) -> ItemPaged["_models.RoleAssignment"]: """Gets role assignments for a scope. :param scope: The scope of the role assignments. Required. @@ -1351,7 +1351,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration] ): @@ -1627,8 +1627,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) def _pre_full_backup_initial( self, @@ -1763,8 +1764,9 @@ def begin_pre_full_backup( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) def begin_pre_full_backup( self, @@ -2116,8 +2118,9 @@ def get_long_running_output(pipeline_response): ) @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) def _pre_full_restore_operation_initial( self, @@ -2252,8 +2255,9 @@ def begin_pre_full_restore_operation( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "content_type", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "content_type", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) def begin_pre_full_restore_operation( self, diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py index dad851f8f58c..3e3ac1855178 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_challenge_auth_policy.py @@ -82,9 +82,7 @@ def __init__(self, credential: 
AsyncTokenProvider, *scopes: str, **kwargs: Any) self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True) self._request_copy: Optional[HttpRequest] = None - async def send( - self, request: PipelineRequest[HttpRequest] - ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: + async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: """Authorize request with a bearer token and send it to the next policy. We implement this method to account for the valid scenario where a Key Vault authentication challenge is @@ -155,7 +153,6 @@ async def handle_challenge_flow( await await_result(self.on_response, request, response) return response - async def on_request(self, request: PipelineRequest) -> None: _enforce_tls(request) challenge = ChallengeCache.get_challenge_for_url(request.http_request.url) @@ -184,7 +181,6 @@ async def on_request(self, request: PipelineRequest) -> None: bodiless_request.headers["Content-Length"] = "0" request.http_request = bodiless_request - async def on_challenge(self, request: PipelineRequest, response: PipelineResponse) -> bool: try: # CAE challenges may not include a scope or tenant; cache from the previous challenge to use if necessary @@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"): await self.authorize_request(request, scope, claims=challenge.claims) else: - await self.authorize_request( - request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id - ) + await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id) return True diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py index 367a99fae45b..f47722f72f97 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/async_client_base.py @@ -44,11 +44,7 @@ def __init__(self, vault_url: str, credential: AsyncTokenCredential, **kwargs: A http_logging_policy = HttpLoggingPolicy(**kwargs) http_logging_policy.allowed_header_names.update( - { - "x-ms-keyvault-network-info", - "x-ms-keyvault-region", - "x-ms-keyvault-service-version" - } + {"x-ms-keyvault-network-info", "x-ms-keyvault-region", "x-ms-keyvault-service-version"} ) verify_challenge = kwargs.pop("verify_challenge_resource", True) @@ -59,7 +55,7 @@ def __init__(self, vault_url: str, credential: AsyncTokenCredential, **kwargs: A authentication_policy=AsyncChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge), sdk_moniker=SDK_MONIKER, http_logging_policy=http_logging_policy, - **kwargs + **kwargs, ) self._models = _models except ValueError as exc: diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py index d0efb46a1ebd..77401fd15ba6 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/client_base.py @@ -103,7 +103,7 @@ def __init__(self, vault_url: str, credential: TokenCredential, 
**kwargs: Any) - authentication_policy=ChallengeAuthPolicy(credential, verify_challenge_resource=verify_challenge), sdk_moniker=SDK_MONIKER, http_logging_policy=http_logging_policy, - **kwargs + **kwargs, ) self._models = _models except ValueError as exc: diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py index 0320df5a868b..8b14b999de78 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/http_challenge.py @@ -148,7 +148,9 @@ def supports_message_protection(self) -> bool: """ return self.supports_pop() and self.server_encryption_key and self.server_signature_key # type: ignore - def _validate_challenge(self, challenge: str) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use + def _validate_challenge( + self, challenge: str + ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use """Verifies that the challenge is a valid auth challenge and returns the key=value pairs. :param str challenge: The WWW-Authenticate header of the challenge response. @@ -161,7 +163,9 @@ def _validate_challenge(self, challenge: str) -> str: # pylint:disable=bad-opti return challenge.strip() - def _validate_request_uri(self, uri: str) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use + def _validate_request_uri( + self, uri: str + ) -> str: # pylint:disable=bad-option-value,useless-option-value,no-self-use """Extracts the host authority from the given URI. :param str uri: The URI of the HTTP request that prompted the challenge. 
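The `http_challenge.py` changes above are signature reflows only, but for orientation, a rough illustration of the key=value pairs that the `_validate_challenge` docstring refers to follows; this is not the SDK's parser, and the header value is a made-up example of a Key Vault WWW-Authenticate challenge:

# Rough sketch only: naive parse of a Bearer challenge into key=value pairs.
challenge = (
    'Bearer authorization="https://login.microsoftonline.com/<tenant>", '
    'resource="https://vault.azure.net"'
)

body = challenge.strip()[len("Bearer "):]  # drop the auth scheme
# Naive comma split; real challenge values are simple enough for this sketch.
pairs = {
    key.strip(): value.strip().strip('"')
    for key, value in (item.split("=", 1) for item in body.split(","))
}
print(pairs["resource"])  # https://vault.azure.net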
diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py index b944ed0c3e91..d92c517b7510 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_models.py @@ -70,9 +70,11 @@ def _from_generated(cls, role_assignment: RoleAssignment) -> "KeyVaultRoleAssign role_assignment_id=role_assignment.id, name=role_assignment.name, assignment_type=role_assignment.type, - properties=KeyVaultRoleAssignmentProperties._from_generated(role_assignment.properties) - if role_assignment.properties - else KeyVaultRoleAssignmentProperties(), + properties=( + KeyVaultRoleAssignmentProperties._from_generated(role_assignment.properties) + if role_assignment.properties + else KeyVaultRoleAssignmentProperties() + ), ) @@ -144,9 +146,11 @@ def _from_generated(cls, definition: RoleDefinition) -> "KeyVaultRoleDefinition" description=definition.properties.description if definition.properties else None, id=definition.id, name=definition.name, - permissions=[KeyVaultPermission._from_generated(p) for p in definition.properties.permissions or []] - if definition.properties - else None, + permissions=( + [KeyVaultPermission._from_generated(p) for p in definition.properties.permissions or []] + if definition.properties + else None + ), role_name=definition.properties.role_name if definition.properties else None, role_type=definition.properties.role_type if definition.properties else None, type=definition.type, diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py index ab4b5ffdbbff..67dad85e648e 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_settings_client.py @@ -27,6 +27,7 @@ class KeyVaultSettingsClient(KeyVaultClientBase): :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key Vault or Managed HSM domain. Defaults to True. 
""" + # pylint:disable=protected-access @distributed_trace @@ -75,11 +76,7 @@ def update_setting(self, setting: KeyVaultSetting, **kwargs: Any) -> KeyVaultSet :raises ~azure.core.exceptions.HttpResponseError: """ parameters = UpdateSettingRequest(value=setting.value) - result = self._client.update_setting( - setting_name=setting.name, - parameters=parameters, - **kwargs - ) + result = self._client.update_setting(setting_name=setting.name, parameters=parameters, **kwargs) return KeyVaultSetting._from_generated(result) def __enter__(self) -> "KeyVaultSettingsClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py index b6ce9fe5cae8..56fc0bb2c3f3 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_access_control_client.py @@ -68,15 +68,10 @@ async def create_role_assignment( assignment_name = name or uuid4() create_parameters = RoleAssignmentCreateParameters( - properties=RoleAssignmentProperties( - principal_id=principal_id, role_definition_id=str(definition_id) - ) + properties=RoleAssignmentProperties(principal_id=principal_id, role_definition_id=str(definition_id)) ) assignment = await self._client.role_assignments.create( - scope=scope, - role_assignment_name=str(assignment_name), - parameters=create_parameters, - **kwargs + scope=scope, role_assignment_name=str(assignment_name), parameters=create_parameters, **kwargs ) return KeyVaultRoleAssignment._from_generated(assignment) @@ -96,9 +91,7 @@ async def delete_role_assignment( :rtype: None """ try: - await self._client.role_assignments.delete( - scope=scope, role_assignment_name=str(name), **kwargs - ) + await self._client.role_assignments.delete(scope=scope, role_assignment_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -117,9 +110,7 @@ async def get_role_assignment( :returns: The fetched role assignment. :rtype: ~azure.keyvault.administration.KeyVaultRoleAssignment """ - assignment = await self._client.role_assignments.get( - scope=scope, role_assignment_name=str(name), **kwargs - ) + assignment = await self._client.role_assignments.get(scope=scope, role_assignment_name=str(name), **kwargs) return KeyVaultRoleAssignment._from_generated(assignment) @distributed_trace @@ -136,9 +127,7 @@ def list_role_assignments( :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.administration.KeyVaultRoleAssignment] """ return self._client.role_assignments.list_for_scope( - scope=scope, - cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleAssignment._from_generated(a) for a in result], **kwargs ) @distributed_trace_async @@ -199,10 +188,7 @@ async def set_role_definition( parameters = RoleDefinitionCreateParameters(properties=properties) definition = await self._client.role_definitions.create_or_update( - scope=scope, - role_definition_name=str(name or uuid4()), - parameters=parameters, - **kwargs + scope=scope, role_definition_name=str(name or uuid4()), parameters=parameters, **kwargs ) return KeyVaultRoleDefinition._from_generated(definition) @@ -221,9 +207,7 @@ async def get_role_definition( :returns: The fetched role definition. 
:rtype: ~azure.keyvault.administration.KeyVaultRoleDefinition """ - definition = await self._client.role_definitions.get( - scope=scope, role_definition_name=str(name), **kwargs - ) + definition = await self._client.role_definitions.get(scope=scope, role_definition_name=str(name), **kwargs) return KeyVaultRoleDefinition._from_generated(definition) @distributed_trace_async @@ -242,9 +226,7 @@ async def delete_role_definition( :rtype: None """ try: - await self._client.role_definitions.delete( - scope=scope, role_definition_name=str(name), **kwargs - ) + await self._client.role_definitions.delete(scope=scope, role_definition_name=str(name), **kwargs) except ResourceNotFoundError: pass @@ -262,9 +244,7 @@ def list_role_definitions( :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.administration.KeyVaultRoleDefinition] """ return self._client.role_definitions.list( - scope=scope, - cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], - **kwargs + scope=scope, cls=lambda result: [KeyVaultRoleDefinition._from_generated(d) for d in result], **kwargs ) async def __aenter__(self) -> "KeyVaultAccessControlClient": diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py index cc27d245b00e..ca5c7e681292 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_backup_client.py @@ -46,9 +46,7 @@ async def _use_continuation_token(self, continuation_token: str, status_method: + "poller's continuation_token() method" ) from ex - pipeline_response = await status_method( - job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response - ) + pipeline_response = await status_method(job_id=job_id, cls=lambda pipeline_response, _, __: pipeline_response) if "azure-asyncoperation" not in pipeline_response.http_response.headers: pipeline_response.http_response.headers["azure-asyncoperation"] = status_url return base64.b64encode(pickle.dumps(pipeline_response)).decode("ascii") @@ -61,8 +59,7 @@ async def begin_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[KeyVaultBackupResult]: - ... + ) -> AsyncLROPoller[KeyVaultBackupResult]: ... @overload async def begin_backup( @@ -72,8 +69,7 @@ async def begin_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[KeyVaultBackupResult]: - ... + ) -> AsyncLROPoller[KeyVaultBackupResult]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace_async @@ -139,8 +135,7 @@ async def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @overload async def begin_restore( @@ -151,8 +146,7 @@ async def begin_restore( key_name: Optional[str] = None, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... # Disabling pylint checks because they don't correctly handle overloads @distributed_trace_async @@ -245,8 +239,7 @@ async def begin_pre_backup( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... 
@overload async def begin_pre_backup( @@ -256,8 +249,7 @@ async def begin_pre_backup( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @distributed_trace_async async def begin_pre_backup( # pylint: disable=docstring-keyword-should-match-keyword-only @@ -313,8 +305,7 @@ async def begin_pre_restore( use_managed_identity: Literal[True], continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @overload async def begin_pre_restore( @@ -324,8 +315,7 @@ async def begin_pre_restore( sas_token: str, continuation_token: Optional[str] = None, **kwargs: Any, - ) -> AsyncLROPoller[None]: - ... + ) -> AsyncLROPoller[None]: ... @distributed_trace_async async def begin_pre_restore( # pylint: disable=docstring-keyword-should-match-keyword-only diff --git a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py index d1a96bb0bc66..3a6eba9acd30 100644 --- a/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py +++ b/sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/aio/_settings_client.py @@ -28,6 +28,7 @@ class KeyVaultSettingsClient(AsyncKeyVaultClientBase): :keyword bool verify_challenge_resource: Whether to verify the authentication challenge resource matches the Key Vault or Managed HSM domain. Defaults to True. """ + # pylint:disable=protected-access @distributed_trace_async @@ -78,11 +79,7 @@ async def update_setting(self, setting: KeyVaultSetting, **kwargs: Any) -> KeyVa :raises ~azure.core.exceptions.HttpResponseError: """ parameters = UpdateSettingRequest(value=setting.value) - result = await self._client.update_setting( - setting_name=setting.name, - parameters=parameters, - **kwargs - ) + result = await self._client.update_setting(setting_name=setting.name, parameters=parameters, **kwargs) return KeyVaultSetting._from_generated(result) async def __aenter__(self) -> "KeyVaultSettingsClient": diff --git a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py index 4a88c14046d9..dec69877910c 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -12,7 +13,7 @@ # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM and AZURE_CLIENT_ID with the ID of a # service principal -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. 
For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -79,8 +80,7 @@ # [START update_a_role_definition] new_permissions = [ KeyVaultPermission( - data_actions=[KeyVaultDataAction.READ_HSM_KEY], - not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] + data_actions=[KeyVaultDataAction.READ_HSM_KEY], not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] ) ] unique_definition_name = role_definition.name diff --git a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py index 4238ea22245d..f11d469b8ebf 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/access_control_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -16,7 +17,7 @@ # 2. azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -34,6 +35,7 @@ # 5. Delete a role definition (delete_role_definition) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] @@ -41,7 +43,7 @@ async def run_sample(): # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. credential = DefaultAzureCredential() client = KeyVaultAccessControlClient(vault_url=MANAGED_HSM_URL, credential=credential) - + # Let's first create a custom role definition. This role permits creating keys in a Managed HSM. # We'll provide a friendly role name, and let a unique role definition name (a GUID) be generated for us. print("\n.. Create a role definition") @@ -57,8 +59,7 @@ async def run_sample(): print("\n.. Update a role definition") new_permissions = [ KeyVaultPermission( - data_actions=[KeyVaultDataAction.READ_HSM_KEY], - not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] + data_actions=[KeyVaultDataAction.READ_HSM_KEY], not_data_actions=[KeyVaultDataAction.CREATE_HSM_KEY] ) ] unique_definition_name = role_definition.name diff --git a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py index a36305134280..582b250e5f5f 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations.py @@ -17,7 +17,7 @@ # 4. A user-assigned managed identity that has access to your managed HSM. For more information about how to create a # user-assigned managed identity, refer to # https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview -# +# # 5. 
A storage account, that your managed identity has access to, containing a blob storage container # (See https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction) # diff --git a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py index 1cba4d1b11ae..5d5318769300 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/backup_restore_operations_async.py @@ -19,7 +19,7 @@ # 4. A user-assigned managed identity that has access to your managed HSM. For more information about how to create a # user-assigned managed identity, refer to # https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview -# +# # 5. A storage account, that your managed identity has access to, containing a blob storage container # (See https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction) # @@ -36,6 +36,7 @@ # 2. Perform a full restore (begin_restore) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] CONTAINER_URL = os.environ["CONTAINER_URL"] @@ -45,7 +46,7 @@ async def run_sample(): # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. credential = ManagedIdentityCredential(client_id=MANAGED_IDENTITY_CLIENT_ID) client = KeyVaultBackupClient(vault_url=MANAGED_HSM_URL, credential=credential) - + # Let's back up the vault with begin_backup, which returns a poller. Calling result() on the poller will return # a KeyVaultBackupResult that contains the URL of the backup after the operation completes. Calling wait() on # the poller will wait until the operation is complete. diff --git a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py index 390e0d7b4e00..68461b4a3e3d 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py index 77a7070a7f44..a400f4889d12 100644 --- a/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py +++ b/sdk/keyvault/azure-keyvault-administration/samples/settings_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -16,7 +17,7 @@ # 2. 
azure-keyvault-administration and azure-identity libraries (pip install these) # # 3. Set environment variable MANAGED_HSM_URL with the URL of your managed HSM -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -28,6 +29,7 @@ # 2. Update a setting (update_setting) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): MANAGED_HSM_URL = os.environ["MANAGED_HSM_URL"] diff --git a/sdk/keyvault/azure-keyvault-administration/setup.py b/sdk/keyvault/azure-keyvault-administration/setup.py index 186dd18143f0..4909da6ec129 100644 --- a/sdk/keyvault/azure-keyvault-administration/setup.py +++ b/sdk/keyvault/azure-keyvault-administration/setup.py @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-keyvault-administration" PACKAGE_PPRINT_NAME = "Key Vault Administration" +PACKAGE_NAMESPACE = "azure.keyvault.administration._generated" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -29,7 +30,6 @@ setup( name=PACKAGE_NAME, version=version, - include_package_data=True, description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", @@ -47,22 +47,25 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", ], zip_safe=False, packages=find_packages( exclude=[ - "samples", "tests", # Exclude packages that will be covered by PEP420 or nspkg "azure", "azure.keyvault", + "azure.keyvault.administration", ] ), + include_package_data=True, + package_data={ + "azure.keyvault.administration._generated": ["py.typed"], + }, install_requires=[ "isodate>=0.6.1", - "azure-core>=1.31.0", + "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py b/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py index 2ee8bb2fbdfb..ab91cde6bc9e 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/_async_test_case.py @@ -62,6 +62,7 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -84,6 +85,7 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -103,11 +105,11 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): - from azure.keyvault.administration.aio import \ - KeyVaultAccessControlClient + from azure.keyvault.administration.aio import KeyVaultAccessControlClient credential = self.get_credential(KeyVaultAccessControlClient, is_async=True) return self.create_client_from_credential( @@ -123,11 
+125,11 @@ async def _preparer(test_class, api_version, **kwargs): async with client: await fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): - from azure.keyvault.administration.aio import \ - KeyVaultSettingsClient + from azure.keyvault.administration.aio import KeyVaultSettingsClient credential = self.get_credential(KeyVaultSettingsClient, is_async=True) return self.create_client_from_credential( diff --git a/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py b/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py index 9befd375d178..dd07ecafd384 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/_test_case.py @@ -55,7 +55,7 @@ def _set_mgmt_settings_real_values(self): class KeyVaultBackupClientPreparer(BaseClientPreparer): def __init__(self, **kwargs) -> None: - super().__init__(**kwargs) + super().__init__(**kwargs) def __call__(self, fn): def _preparer(test_class, api_version, **kwargs): @@ -66,6 +66,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -79,7 +80,7 @@ def create_backup_client(self, **kwargs): class KeyVaultBackupClientSasPreparer(BaseClientPreparer): def __init__(self, **kwargs) -> None: - super().__init__(**kwargs) + super().__init__(**kwargs) def __call__(self, fn): def _preparer(test_class, api_version, **kwargs): @@ -91,6 +92,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_backup_client(self, **kwargs): @@ -113,6 +115,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_access_control_client(self, **kwargs): @@ -135,6 +138,7 @@ def _preparer(test_class, api_version, **kwargs): with client: fn(test_class, client, **kwargs) + return _preparer def create_settings_client(self, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-administration/tests/conftest.py b/sdk/keyvault/azure-keyvault-administration/tests/conftest.py index 66f14f669a46..872fdb9d38f7 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/conftest.py @@ -16,7 +16,8 @@ remove_batch_sanitizers, ) -os.environ['PYTHONHASHSEED'] = '0' +os.environ["PYTHONHASHSEED"] = "0" + @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): @@ -24,22 +25,22 @@ def add_sanitizers(test_proxy): azure_keyvault_url = azure_keyvault_url.rstrip("/") keyvault_tenant_id = os.getenv("KEYVAULT_TENANT_ID", "keyvault_tenant_id") keyvault_subscription_id = os.getenv("KEYVAULT_SUBSCRIPTION_ID", "keyvault_subscription_id") - azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL","https://Sanitized.managedhsm.azure.net") + azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL", "https://Sanitized.managedhsm.azure.net") azure_managedhsm_url = azure_managedhsm_url.rstrip("/") - azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL","https://Sanitized.azurewebsites.net") - azure_attestation_uri = azure_attestation_uri.rstrip('/') + azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL", "https://Sanitized.azurewebsites.net") + azure_attestation_uri = azure_attestation_uri.rstrip("/") storage_url = os.environ.get("BLOB_STORAGE_URL", "https://Sanitized.blob.core.windows.net") 
client_id = os.environ.get("KEYVAULT_CLIENT_ID", "service-principal-id") - sas_token = os.environ.get("BLOB_STORAGE_SAS_TOKEN","fake-sas") + sas_token = os.environ.get("BLOB_STORAGE_SAS_TOKEN", "fake-sas") add_general_string_sanitizer(target=azure_keyvault_url, value="https://Sanitized.vault.azure.net") add_general_string_sanitizer(target=keyvault_tenant_id, value="00000000-0000-0000-0000-000000000000") add_general_string_sanitizer(target=keyvault_subscription_id, value="00000000-0000-0000-0000-000000000000") - add_general_string_sanitizer(target=azure_managedhsm_url,value="https://Sanitized.managedhsm.azure.net") - add_general_string_sanitizer(target=azure_attestation_uri,value="https://Sanitized.azurewebsites.net") + add_general_string_sanitizer(target=azure_managedhsm_url, value="https://Sanitized.managedhsm.azure.net") + add_general_string_sanitizer(target=azure_attestation_uri, value="https://Sanitized.azurewebsites.net") add_general_string_sanitizer(target=storage_url, value="https://Sanitized.blob.core.windows.net") add_general_string_sanitizer(target=sas_token, value="fake-sas") - add_general_string_sanitizer(target=client_id, value = "service-principal-id") + add_general_string_sanitizer(target=client_id, value="service-principal-id") # Sanitize API versions of `azure-keyvault-keys` requests add_uri_regex_sanitizer( regex="keys/([^/]*)/create\\?api-version=(\\S*)", value="keys/$1/create?api-version=sanitized" diff --git a/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py b/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py index 80730f6ae7d8..dd5dfb5a2add 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/perfstress_tests/get_role_definition.py @@ -8,10 +8,10 @@ from azure.identity import DefaultAzureCredential from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential from azure.keyvault.administration import ( - KeyVaultAccessControlClient, + KeyVaultAccessControlClient, KeyVaultDataAction, KeyVaultPermission, - KeyVaultRoleScope, + KeyVaultRoleScope, ) from azure.keyvault.administration.aio import KeyVaultAccessControlClient as AsyncKeyVaultAccessControlClient @@ -32,7 +32,7 @@ def __init__(self, arguments): self.role_name = uuid.uuid4() self.scope = KeyVaultRoleScope.GLOBAL self.permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.CREATE_HSM_KEY])] - + async def global_setup(self): """The global setup is run only once.""" await super().global_setup() @@ -42,7 +42,7 @@ async def global_cleanup(self): """The global cleanup is run only once.""" await self.async_client.delete_role_definition(scope=self.scope, name=self.role_name) await super().global_cleanup() - + async def close(self): """This is run after cleanup.""" await self.async_client.close() diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py index eae2f81bb7ab..55c4d4619a9c 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control.py @@ -47,11 +47,7 @@ def test_role_definitions(self, client, **kwargs): permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.READ_HSM_KEY])] created_definition = client.set_role_definition( - scope=scope, - name=definition_name, - role_name=role_name, - 
description="test", - permissions=permissions + scope=scope, name=definition_name, role_name=role_name, description="test", permissions=permissions ) assert "/" in created_definition.assignable_scopes assert created_definition.role_name == role_name @@ -61,9 +57,7 @@ def test_role_definitions(self, client, **kwargs): assert created_definition.permissions[0].data_actions == [KeyVaultDataAction.READ_HSM_KEY] assert created_definition.assignable_scopes == [KeyVaultRoleScope.GLOBAL] # update custom role definition - permissions = [ - KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY]) - ] + permissions = [KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY])] role_name2 = self.get_resource_name("role-name2") updated_definition = client.set_role_definition( scope=scope, name=definition_name, role_name=role_name2, permissions=permissions @@ -106,14 +100,14 @@ def test_role_assignment(self, client, **kwargs): created = client.create_role_assignment(scope, definition.id, principal_id, name=name) assert created.name == name - #assert created.properties.principal_id == principal_id + # assert created.properties.principal_id == principal_id assert created.properties.role_definition_id == definition.id assert created.properties.scope == scope # should be able to get the new assignment got = client.get_role_assignment(scope, name) assert got.name == name - #assert got.properties.principal_id == principal_id + # assert got.properties.principal_id == principal_id assert got.properties.role_definition_id == definition.id assert got.properties.scope == scope diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py index 3b8e8b0e412e..ab3237ae993d 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_access_control_async.py @@ -7,7 +7,7 @@ import uuid import pytest -from azure.keyvault.administration import KeyVaultDataAction, KeyVaultPermission,KeyVaultRoleScope +from azure.keyvault.administration import KeyVaultDataAction, KeyVaultPermission, KeyVaultRoleScope from devtools_testutils import add_general_regex_sanitizer, set_bodiless_matcher from devtools_testutils.aio import recorded_by_proxy_async @@ -51,11 +51,7 @@ async def test_role_definitions(self, client, **kwargs): add_general_regex_sanitizer(function_scoped=True, regex=definition_name, value="definition-name") permissions = [KeyVaultPermission(data_actions=[KeyVaultDataAction.READ_HSM_KEY])] created_definition = await client.set_role_definition( - scope=scope, - name=definition_name, - role_name=role_name, - description="test", - permissions=permissions + scope=scope, name=definition_name, role_name=role_name, description="test", permissions=permissions ) assert "/" in created_definition.assignable_scopes assert created_definition.role_name == role_name @@ -66,9 +62,7 @@ async def test_role_definitions(self, client, **kwargs): assert created_definition.assignable_scopes == [KeyVaultRoleScope.GLOBAL] # update custom role definition - permissions = [ - KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY]) - ] + permissions = [KeyVaultPermission(data_actions=[], not_data_actions=[KeyVaultDataAction.READ_HSM_KEY])] role_name2 = self.get_resource_name("role-name2") updated_definition = await client.set_role_definition( scope=scope, 
name=definition_name, role_name=role_name2, permissions=permissions @@ -95,11 +89,10 @@ async def test_role_definitions(self, client, **kwargs): await client.delete_role_definition(scope, definition_name) async for d in client.list_role_definitions(scope): - assert (d.id != definition.id), "the role definition should have been deleted" + assert d.id != definition.id, "the role definition should have been deleted" if self.is_live: await asyncio.sleep(60) # additional waiting to avoid conflicts with resources in other tests - @pytest.mark.asyncio @pytest.mark.parametrize("api_version", all_api_versions) @KeyVaultAccessControlClientPreparer() @@ -119,14 +112,14 @@ async def test_role_assignment(self, client, **kwargs): created = await client.create_role_assignment(scope, definition.id, principal_id, name=name) assert created.name == name - #assert created.properties.principal_id == principal_id + # assert created.properties.principal_id == principal_id assert created.properties.role_definition_id == definition.id assert created.properties.scope == scope # should be able to get the new assignment got = await client.get_role_assignment(scope, name) assert got.name == name - #assert got.properties.principal_id == principal_id + # assert got.properties.principal_id == principal_id assert got.properties.role_definition_id == definition.id assert got.properties.scope == scope diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py index d5bd21ee0dfa..6f8f20155dc6 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client.py @@ -23,8 +23,9 @@ class TestBackupClientTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys import KeyClient + credential = self.get_credential(KeyClient) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @@ -99,7 +100,6 @@ def test_selective_key_restore(self, client, **kwargs): key_name = self.get_resource_name("selective-restore-test-key") key_client.create_rsa_key(key_name) - # backup the vault container_uri = kwargs.pop("container_uri") backup_poller = client.begin_backup(container_uri, use_managed_identity=True) @@ -188,7 +188,9 @@ def test_backup_restore_sas(self, client: KeyVaultBackupClient, **kwargs): sas_token = kwargs.pop("sas_token") if self.is_live and not sas_token: - pytest.skip("SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable.") + pytest.skip( + "SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable." 
+ ) client.begin_pre_backup(container_uri, sas_token=sas_token).wait() backup_poller = client.begin_backup(container_uri, sas_token) # Test positional SAS token for backwards compat diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py index 6dc34efe00a9..3f554d42ada9 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_backup_client_async.py @@ -19,9 +19,10 @@ class TestBackupClientTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): - from azure.keyvault.keys.aio import KeyClient - credential = self.get_credential(KeyClient, is_async=True) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + from azure.keyvault.keys.aio import KeyClient + + credential = self.get_credential(KeyClient, is_async=True) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.asyncio @pytest.mark.parametrize("api_version", only_default) @@ -138,7 +139,7 @@ async def test_backup_client_polling(self, client, **kwargs): if self.is_live: assert backup_poller.status() == "InProgress" assert not backup_poller.done() or backup_poller.polling_method().finished() - #assert rehydrated.status() == "InProgress" + # assert rehydrated.status() == "InProgress" assert not rehydrated.done() or rehydrated.polling_method().finished() backup_operation = await backup_poller.result() @@ -168,7 +169,7 @@ async def test_backup_client_polling(self, client, **kwargs): if self.is_live: assert restore_poller.status() == "InProgress" assert not restore_poller.done() or restore_poller.polling_method().finished() - #assert rehydrated.status() == "InProgress" + # assert rehydrated.status() == "InProgress" assert not rehydrated.done() or rehydrated.polling_method().finished() await rehydrated.wait() @@ -189,7 +190,9 @@ async def test_backup_restore_sas(self, client, **kwargs): sas_token = kwargs.pop("sas_token") if self.is_live and not sas_token: - pytest.skip("SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable.") + pytest.skip( + "SAS token is required for live tests. Please set the BLOB_STORAGE_SAS_TOKEN environment variable." 
+ ) check_poller = await client.begin_pre_backup(container_uri, sas_token=sas_token) await check_poller.wait() diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py index ddcb3d8876d1..4bb463ad28be 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration.py @@ -18,8 +18,9 @@ class TestExamplesTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys import KeyClient + credential = self.get_credential(KeyClient) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @@ -63,7 +64,7 @@ def test_example_backup_and_restore(self, client, **kwargs): @pytest.mark.parametrize("api_version", only_default) @KeyVaultBackupClientPreparer() @recorded_by_proxy - def test_example_selective_key_restore(self, client,**kwargs): + def test_example_selective_key_restore(self, client, **kwargs): set_bodiless_matcher() # create a key to selectively restore managed_hsm_url = kwargs.pop("managed_hsm_url") diff --git a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py index 31748fab87fe..c5ae8593b134 100644 --- a/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py +++ b/sdk/keyvault/azure-keyvault-administration/tests/test_examples_administration_async.py @@ -19,8 +19,9 @@ class TestExamplesTests(KeyVaultTestCase): def create_key_client(self, vault_uri, **kwargs): from azure.keyvault.keys.aio import KeyClient + credential = self.get_credential(KeyClient, is_async=True) - return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs ) + return self.create_client_from_credential(KeyClient, credential=credential, vault_url=vault_uri, **kwargs) @pytest.mark.asyncio @pytest.mark.parametrize("api_version", only_default) diff --git a/sdk/keyvault/azure-keyvault-certificates/MANIFEST.in b/sdk/keyvault/azure-keyvault-certificates/MANIFEST.in index beadef0a4b14..a39bafcb9a1a 100644 --- a/sdk/keyvault/azure-keyvault-certificates/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-certificates/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/certificates/py.typed +include azure/keyvault/certificates/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/certificates/__init__.py diff --git a/sdk/keyvault/azure-keyvault-certificates/_metadata.json b/sdk/keyvault/azure-keyvault-certificates/_metadata.json new file mode 100644 index 000000000000..06284fddac1b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-certificates/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "7.6" +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-certificates/apiview-properties.json b/sdk/keyvault/azure-keyvault-certificates/apiview-properties.json new file mode 100644 index 000000000000..a914a0c1052b --- /dev/null +++ 
+++ b/sdk/keyvault/azure-keyvault-certificates/apiview-properties.json
@@ -0,0 +1,98 @@
+{
+    "CrossLanguagePackageId": "KeyVault",
+    "CrossLanguageDefinitionId": {
+        "azure.keyvault.certificates._generated.models.Action": "KeyVault.Action",
+        "azure.keyvault.certificates._generated.models.AdministratorDetails": "KeyVault.AdministratorDetails",
+        "azure.keyvault.certificates._generated.models.BackupCertificateResult": "KeyVault.BackupCertificateResult",
+        "azure.keyvault.certificates._generated.models.CertificateAttributes": "KeyVault.CertificateAttributes",
+        "azure.keyvault.certificates._generated.models.CertificateBundle": "KeyVault.CertificateBundle",
+        "azure.keyvault.certificates._generated.models.CertificateCreateParameters": "KeyVault.CertificateCreateParameters",
+        "azure.keyvault.certificates._generated.models.CertificateImportParameters": "KeyVault.CertificateImportParameters",
+        "azure.keyvault.certificates._generated.models.CertificateIssuerItem": "KeyVault.CertificateIssuerItem",
+        "azure.keyvault.certificates._generated.models.CertificateIssuerSetParameters": "KeyVault.CertificateIssuerSetParameters",
+        "azure.keyvault.certificates._generated.models.CertificateIssuerUpdateParameters": "KeyVault.CertificateIssuerUpdateParameters",
+        "azure.keyvault.certificates._generated.models.CertificateItem": "KeyVault.CertificateItem",
+        "azure.keyvault.certificates._generated.models.CertificateMergeParameters": "KeyVault.CertificateMergeParameters",
+        "azure.keyvault.certificates._generated.models.CertificateOperation": "KeyVault.CertificateOperation",
+        "azure.keyvault.certificates._generated.models.CertificateOperationUpdateParameter": "KeyVault.CertificateOperationUpdateParameter",
+        "azure.keyvault.certificates._generated.models.CertificatePolicy": "KeyVault.CertificatePolicy",
+        "azure.keyvault.certificates._generated.models.CertificateRestoreParameters": "KeyVault.CertificateRestoreParameters",
+        "azure.keyvault.certificates._generated.models.CertificateUpdateParameters": "KeyVault.CertificateUpdateParameters",
+        "azure.keyvault.certificates._generated.models.Contact": "KeyVault.Contact",
+        "azure.keyvault.certificates._generated.models.Contacts": "KeyVault.Contacts",
+        "azure.keyvault.certificates._generated.models.DeletedCertificateBundle": "KeyVault.DeletedCertificateBundle",
+        "azure.keyvault.certificates._generated.models.DeletedCertificateItem": "KeyVault.DeletedCertificateItem",
+        "azure.keyvault.certificates._generated.models.IssuerAttributes": "KeyVault.IssuerAttributes",
+        "azure.keyvault.certificates._generated.models.IssuerBundle": "KeyVault.IssuerBundle",
+        "azure.keyvault.certificates._generated.models.IssuerCredentials": "KeyVault.IssuerCredentials",
+        "azure.keyvault.certificates._generated.models.IssuerParameters": "KeyVault.IssuerParameters",
+        "azure.keyvault.certificates._generated.models.KeyProperties": "KeyVault.KeyProperties",
+        "azure.keyvault.certificates._generated.models.KeyVaultError": "KeyVaultError",
+        "azure.keyvault.certificates._generated.models.KeyVaultErrorError": "KeyVaultError.error.anonymous",
+        "azure.keyvault.certificates._generated.models.LifetimeAction": "KeyVault.LifetimeAction",
+        "azure.keyvault.certificates._generated.models.OrganizationDetails": "KeyVault.OrganizationDetails",
+        "azure.keyvault.certificates._generated.models.SecretProperties": "KeyVault.SecretProperties",
+        "azure.keyvault.certificates._generated.models.SubjectAlternativeNames": "KeyVault.SubjectAlternativeNames",
+        "azure.keyvault.certificates._generated.models.Trigger": "KeyVault.Trigger",
"KeyVault.Trigger", + "azure.keyvault.certificates._generated.models.X509CertificateProperties": "KeyVault.X509CertificateProperties", + "azure.keyvault.certificates._generated.models.DeletionRecoveryLevel": "KeyVault.DeletionRecoveryLevel", + "azure.keyvault.certificates._generated.models.JsonWebKeyType": "KeyVault.JsonWebKeyType", + "azure.keyvault.certificates._generated.models.JsonWebKeyCurveName": "KeyVault.JsonWebKeyCurveName", + "azure.keyvault.certificates._generated.models.KeyUsageType": "KeyVault.KeyUsageType", + "azure.keyvault.certificates._generated.models.CertificatePolicyAction": "KeyVault.CertificatePolicyAction", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificates": "KeyVault.getCertificates", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificates": "KeyVault.getCertificates", + "azure.keyvault.certificates._generated.KeyVaultClient.delete_certificate": "KeyVault.deleteCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.delete_certificate": "KeyVault.deleteCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.set_certificate_contacts": "KeyVault.setCertificateContacts", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.set_certificate_contacts": "KeyVault.setCertificateContacts", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_contacts": "KeyVault.getCertificateContacts", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_contacts": "KeyVault.getCertificateContacts", + "azure.keyvault.certificates._generated.KeyVaultClient.delete_certificate_contacts": "KeyVault.deleteCertificateContacts", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.delete_certificate_contacts": "KeyVault.deleteCertificateContacts", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_issuers": "KeyVault.getCertificateIssuers", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_issuers": "KeyVault.getCertificateIssuers", + "azure.keyvault.certificates._generated.KeyVaultClient.set_certificate_issuer": "KeyVault.setCertificateIssuer", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.set_certificate_issuer": "KeyVault.setCertificateIssuer", + "azure.keyvault.certificates._generated.KeyVaultClient.update_certificate_issuer": "KeyVault.updateCertificateIssuer", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.update_certificate_issuer": "KeyVault.updateCertificateIssuer", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_issuer": "KeyVault.getCertificateIssuer", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_issuer": "KeyVault.getCertificateIssuer", + "azure.keyvault.certificates._generated.KeyVaultClient.delete_certificate_issuer": "KeyVault.deleteCertificateIssuer", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.delete_certificate_issuer": "KeyVault.deleteCertificateIssuer", + "azure.keyvault.certificates._generated.KeyVaultClient.create_certificate": "KeyVault.createCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.create_certificate": "KeyVault.createCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.import_certificate": "KeyVault.importCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.import_certificate": "KeyVault.importCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_versions": 
"KeyVault.getCertificateVersions", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_versions": "KeyVault.getCertificateVersions", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_policy": "KeyVault.getCertificatePolicy", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_policy": "KeyVault.getCertificatePolicy", + "azure.keyvault.certificates._generated.KeyVaultClient.update_certificate_policy": "KeyVault.updateCertificatePolicy", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.update_certificate_policy": "KeyVault.updateCertificatePolicy", + "azure.keyvault.certificates._generated.KeyVaultClient.update_certificate": "KeyVault.updateCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.update_certificate": "KeyVault.updateCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate": "KeyVault.getCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate": "KeyVault.getCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.update_certificate_operation": "KeyVault.updateCertificateOperation", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.update_certificate_operation": "KeyVault.updateCertificateOperation", + "azure.keyvault.certificates._generated.KeyVaultClient.get_certificate_operation": "KeyVault.getCertificateOperation", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_certificate_operation": "KeyVault.getCertificateOperation", + "azure.keyvault.certificates._generated.KeyVaultClient.delete_certificate_operation": "KeyVault.deleteCertificateOperation", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.delete_certificate_operation": "KeyVault.deleteCertificateOperation", + "azure.keyvault.certificates._generated.KeyVaultClient.merge_certificate": "KeyVault.mergeCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.merge_certificate": "KeyVault.mergeCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.backup_certificate": "KeyVault.backupCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.backup_certificate": "KeyVault.backupCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.restore_certificate": "KeyVault.restoreCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.restore_certificate": "KeyVault.restoreCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.get_deleted_certificates": "KeyVault.getDeletedCertificates", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_deleted_certificates": "KeyVault.getDeletedCertificates", + "azure.keyvault.certificates._generated.KeyVaultClient.get_deleted_certificate": "KeyVault.getDeletedCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.get_deleted_certificate": "KeyVault.getDeletedCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.purge_deleted_certificate": "KeyVault.purgeDeletedCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.purge_deleted_certificate": "KeyVault.purgeDeletedCertificate", + "azure.keyvault.certificates._generated.KeyVaultClient.recover_deleted_certificate": "KeyVault.recoverDeletedCertificate", + "azure.keyvault.certificates._generated.aio.KeyVaultClient.recover_deleted_certificate": "KeyVault.recoverDeletedCertificate" + } +} \ No newline at end of file diff --git 
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/__init__.py b/sdk/keyvault/azure-keyvault-certificates/azure/__init__.py
index 679ab6995134..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/__init__.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/__init__.py
@@ -1,5 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
 __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/__init__.py
index 679ab6995134..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/__init__.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/__init__.py
@@ -1,5 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
 __path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/__init__.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/__init__.py
index 3e8da4143494..d55ccad1f573 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/__init__.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/__init__.py
@@ -1,56 +1 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
-from ._client import CertificateClient
-from ._enums import(
-    CertificatePolicyAction,
-    KeyCurveName,
-    KeyType,
-    CertificateContentType,
-    KeyUsageType,
-    WellKnownIssuerNames
-)
-from ._models import(
-    AdministratorContact,
-    CertificateContact,
-    CertificateIssuer,
-    CertificateOperation,
-    CertificateOperationError,
-    CertificatePolicy,
-    CertificateProperties,
-    DeletedCertificate,
-    IssuerProperties,
-    LifetimeAction,
-    KeyVaultCertificate,
-    KeyVaultCertificateIdentifier
-)
-from ._shared.client_base import ApiVersion
-
-__all__ = [
-    "ApiVersion",
-    "CertificatePolicyAction",
-    "AdministratorContact",
-    "CertificateClient",
-    "CertificateContact",
-    "CertificateIssuer",
-    "CertificateOperation",
-    "CertificateOperationError",
-    "CertificatePolicy",
-    "CertificateProperties",
-    "DeletedCertificate",
-    "IssuerProperties",
-    "KeyCurveName",
-    "KeyType",
-    "KeyVaultCertificate",
-    "KeyVaultCertificateIdentifier",
-    "KeyUsageType",
-    "LifetimeAction",
-    "CertificateContentType",
-    "WellKnownIssuerNames",
-    "CertificateIssuer",
-    "IssuerProperties"
-]
-
-from ._version import VERSION
-__version__ = VERSION
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_client.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_client.py
index d0f4f65fea02..41dc2a9f3da1 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_client.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_client.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
@@ -135,7 +136,7 @@ def begin_create_certificate(
         create_certificate_polling = CreateCertificatePoller(
             pipeline_response=pipeline_response,
             get_certificate_command=get_certificate_command,
-            interval=polling_interval
+            interval=polling_interval,
         )
 
         def no_op(*_, **__) -> Any:  # The deserialization callback is ignored based on polling implementation
@@ -166,9 +167,7 @@ def get_certificate(self, certificate_name: str, **kwargs: Any) -> KeyVaultCerti
             :caption: Get a certificate
             :dedent: 8
         """
-        bundle = self._client.get_certificate(
-            certificate_name=certificate_name, certificate_version="", **kwargs
-        )
+        bundle = self._client.get_certificate(certificate_name=certificate_name, certificate_version="", **kwargs)
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
     @distributed_trace
@@ -195,9 +194,7 @@ def get_certificate_version(self, certificate_name: str, version: str, **kwargs:
             :caption: Get a certificate with a specific version
             :dedent: 8
         """
-        bundle = self._client.get_certificate(
-            certificate_name=certificate_name, certificate_version=version, **kwargs
-        )
+        bundle = self._client.get_certificate(certificate_name=certificate_name, certificate_version=version, **kwargs)
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
     @distributed_trace
@@ -272,9 +269,7 @@ def get_deleted_certificate(self, certificate_name: str, **kwargs: Any) -> Delet
             :caption: Get a deleted certificate
             :dedent: 8
         """
-        bundle = self._client.get_deleted_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = self._client.get_deleted_certificate(certificate_name=certificate_name, **kwargs)
         return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)
 
     @distributed_trace
@@ -294,9 +289,7 @@ def purge_deleted_certificate(self, certificate_name: str, **kwargs: Any) -> Non
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        self._client.purge_deleted_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        self._client.purge_deleted_certificate(certificate_name=certificate_name, **kwargs)
 
     @distributed_trace
     def begin_recover_deleted_certificate(self, certificate_name: str, **kwargs: Any) -> LROPoller[KeyVaultCertificate]:
@@ -341,7 +334,7 @@ def begin_recover_deleted_certificate(self, certificate_name: str, **kwargs: Any
             pipeline_response=pipeline_response,
             command=command,
             final_resource=recovered_certificate,
-            interval=polling_interval
+            interval=polling_interval,
         )
         return KeyVaultOperationPoller(polling_method)
 
@@ -403,9 +396,7 @@ def import_certificate(
             preserve_cert_order=preserve_order,
         )
 
-        bundle = self._client.import_certificate(
-            certificate_name=certificate_name, parameters=parameters, **kwargs
-        )
+        bundle = self._client.import_certificate(certificate_name=certificate_name, parameters=parameters, **kwargs)
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
     @distributed_trace
@@ -421,9 +412,7 @@ def get_certificate_policy(self, certificate_name: str, **kwargs: Any) -> Certif
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        bundle = self._client.get_certificate_policy(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = self._client.get_certificate_policy(certificate_name=certificate_name, **kwargs)
         return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
 
     @distributed_trace
@@ -444,9 +433,7 @@ def update_certificate_policy(
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
         bundle = self._client.update_certificate_policy(
-            certificate_name=certificate_name,
-            certificate_policy=policy._to_certificate_policy_bundle(),
-            **kwargs
+            certificate_name=certificate_name, certificate_policy=policy._to_certificate_policy_bundle(), **kwargs
         )
         return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
 
@@ -488,15 +475,10 @@ def update_certificate_properties(
         else:
             attributes = None
 
-        parameters = self._models.CertificateUpdateParameters(
-            certificate_attributes=attributes, tags=tags
-        )
+        parameters = self._models.CertificateUpdateParameters(certificate_attributes=attributes, tags=tags)
 
         bundle = self._client.update_certificate(
-            certificate_name=certificate_name,
-            certificate_version=version or "",
-            parameters=parameters,
-            **kwargs
+            certificate_name=certificate_name, certificate_version=version or "", parameters=parameters, **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
@@ -525,9 +507,7 @@ def backup_certificate(self, certificate_name: str, **kwargs: Any) -> bytes:
             :caption: Get a certificate backup
             :dedent: 8
         """
-        backup_result = self._client.backup_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        backup_result = self._client.backup_certificate(certificate_name=certificate_name, **kwargs)
         return backup_result.value
 
     @distributed_trace
@@ -554,8 +534,7 @@ def restore_certificate_backup(self, backup: bytes, **kwargs: Any) -> KeyVaultCe
             :dedent: 8
         """
         bundle = self._client.restore_certificate(
-            parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup),
-            **kwargs
+            parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup), **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
@@ -601,7 +580,7 @@ def list_deleted_certificates(
             cls=lambda objs: [
                 DeletedCertificate._from_deleted_certificate_item(deleted_certificate_item=x) for x in objs
             ],
-            **kwargs
+            **kwargs,
         )
 
     @distributed_trace
@@ -643,7 +622,7 @@ def list_properties_of_certificates(
         return self._client.get_certificates(
             maxresults=max_page_size,
             cls=lambda objs: [CertificateProperties._from_certificate_item(certificate_item=x) for x in objs],
-            **kwargs
+            **kwargs,
         )
 
     @distributed_trace
@@ -674,7 +653,7 @@ def list_properties_of_certificate_versions(
             certificate_name=certificate_name,
             maxresults=max_page_size,
             cls=lambda objs: [CertificateProperties._from_certificate_item(certificate_item=x) for x in objs],
-            **kwargs
+            **kwargs,
        )
 
     @distributed_trace
@@ -698,8 +677,7 @@ def set_contacts(self, contacts: "List[CertificateContact]", **kwargs: Any) -> "
             :dedent: 8
         """
         new_contacts = self._client.set_certificate_contacts(
-            contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),
-            **kwargs
+            contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]), **kwargs
         )
         return [
             CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list
@@ -758,9 +736,7 @@ def get_certificate_operation(self, certificate_name: str, **kwargs: Any) -> Cer
             the former if the certificate doesn't exist; the latter for other errors
         """
-        bundle = self._client.get_certificate_operation(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = self._client.get_certificate_operation(certificate_name=certificate_name, **kwargs)
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
     @distributed_trace
@@ -776,9 +752,7 @@ def delete_certificate_operation(self, certificate_name: str, **kwargs: Any) ->
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        bundle = self._client.delete_certificate_operation(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = self._client.delete_certificate_operation(certificate_name=certificate_name, **kwargs)
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
     @distributed_trace
@@ -795,7 +769,7 @@ def cancel_certificate_operation(self, certificate_name: str, **kwargs: Any) ->
         bundle = self._client.update_certificate_operation(
             certificate_name=certificate_name,
             certificate_operation=self._models.CertificateOperationUpdateParameter(cancellation_requested=True),
-            **kwargs
+            **kwargs,
         )
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
@@ -840,9 +814,7 @@ def merge_certificate(
             x509_certificates=x509_certificates, certificate_attributes=attributes, tags=tags
         )
 
-        bundle = self._client.merge_certificate(
-            certificate_name=certificate_name, parameters=parameters, **kwargs
-        )
+        bundle = self._client.merge_certificate(certificate_name=certificate_name, parameters=parameters, **kwargs)
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
     @distributed_trace
@@ -865,9 +837,7 @@ def get_issuer(self, issuer_name: str, **kwargs: Any) -> CertificateIssuer:
             :caption: Get an issuer
             :dedent: 8
         """
-        issuer_bundle = self._client.get_certificate_issuer(
-            issuer_name=issuer_name, **kwargs
-        )
+        issuer_bundle = self._client.get_certificate_issuer(issuer_name=issuer_name, **kwargs)
         return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace
@@ -942,9 +912,7 @@ def create_issuer(
             attributes=issuer_attributes,
         )
 
-        issuer_bundle = self._client.set_certificate_issuer(
-            issuer_name=issuer_name, parameter=parameters, **kwargs
-        )
+        issuer_bundle = self._client.set_certificate_issuer(issuer_name=issuer_name, parameter=parameters, **kwargs)
        return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace
@@ -1010,9 +978,7 @@ def update_issuer(
             attributes=issuer_attributes,
         )
 
-        issuer_bundle = self._client.update_certificate_issuer(
-            issuer_name=issuer_name, parameter=parameters, **kwargs
-        )
+        issuer_bundle = self._client.update_certificate_issuer(issuer_name=issuer_name, parameter=parameters, **kwargs)
         return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace
@@ -1036,9 +1002,7 @@ def delete_issuer(self, issuer_name: str, **kwargs: Any) -> CertificateIssuer:
             :caption: Delete an issuer
             :dedent: 8
         """
-        issuer_bundle = self._client.delete_certificate_issuer(
-            issuer_name=issuer_name, **kwargs
-        )
+        issuer_bundle = self._client.delete_certificate_issuer(issuer_name=issuer_name, **kwargs)
         return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace
@@ -1064,7 +1028,7 @@ def list_properties_of_issuers(self, **kwargs: Any) -> ItemPaged[IssuerPropertie
         return self._client.get_certificate_issuers(
             maxresults=max_page_size,
             cls=lambda objs: [IssuerProperties._from_issuer_item(issuer_item=x) for x in objs],
-            **kwargs
+            **kwargs,
         )
 
     def __enter__(self) -> "CertificateClient":
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_client.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_client.py
index affcf5d228d3..39918fe3e74a 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_client.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_client.py
@@ -15,14 +15,14 @@
 from azure.core.rest import HttpRequest, HttpResponse
 
 from ._configuration import KeyVaultClientConfiguration
-from ._operations import KeyVaultClientOperationsMixin
+from ._operations._operations import _KeyVaultClientOperationsMixin
 from ._utils.serialization import Deserializer, Serializer
 
 if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential
 
 
-class KeyVaultClient(KeyVaultClientOperationsMixin):
+class KeyVaultClient(_KeyVaultClientOperationsMixin):
     """The key vault client performs cryptographic key operations and vault operations against the Key
     Vault service.
 
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/__init__.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/__init__.py
index d514f5e4b5be..933fcd7d1b55 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/__init__.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import
 
-from ._operations import KeyVaultClientOperationsMixin  # type: ignore
 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk
 
-__all__ = [
-    "KeyVaultClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/_operations.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/_operations.py
index 44a0dcbcff20..a53a3a299454 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/_operations.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/_operations/_operations.py
@@ -9,7 +9,7 @@
 from collections.abc import MutableMapping
 from io import IOBase
 import json
-from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
 import urllib.parse
 
 from azure.core import PipelineClient
@@ -746,14 +746,14 @@ def build_key_vault_recover_deleted_certificate_request(  # pylint: disable=name
     return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
 
 
-class KeyVaultClientOperationsMixin(  # pylint: disable=too-many-public-methods
+class _KeyVaultClientOperationsMixin(  # pylint: disable=too-many-public-methods
     ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration]
 ):
 
     @distributed_trace
     def get_certificates(
         self, *, maxresults: Optional[int] = None, include_pending: Optional[bool] = None, **kwargs: Any
-    ) -> Iterable["_models.CertificateItem"]:
+    ) -> ItemPaged["_models.CertificateItem"]:
         """List certificates in a specified key vault.
 
         The GetCertificates operation returns the set of certificates resources in the specified key
@@ -1179,7 +1179,7 @@ def delete_certificate_contacts(self, **kwargs: Any) -> _models.Contacts:
     @distributed_trace
     def get_certificate_issuers(
         self, *, maxresults: Optional[int] = None, **kwargs: Any
-    ) -> Iterable["_models.CertificateIssuerItem"]:
+    ) -> ItemPaged["_models.CertificateIssuerItem"]:
         """List certificate issuers for a specified key vault.
 
         The GetCertificateIssuers operation returns the set of certificate issuer resources in the
@@ -2049,7 +2049,7 @@ def import_certificate(
     @distributed_trace
     def get_certificate_versions(
         self, certificate_name: str, *, maxresults: Optional[int] = None, **kwargs: Any
-    ) -> Iterable["_models.CertificateItem"]:
+    ) -> ItemPaged["_models.CertificateItem"]:
         """List the versions of a certificate.
 
         The GetCertificateVersions operation returns the versions of a certificate in the specified key
@@ -3279,7 +3279,7 @@ def restore_certificate(
     @distributed_trace
     def get_deleted_certificates(
         self, *, maxresults: Optional[int] = None, include_pending: Optional[bool] = None, **kwargs: Any
-    ) -> Iterable["_models.DeletedCertificateItem"]:
+    ) -> ItemPaged["_models.DeletedCertificateItem"]:
         """Lists the deleted certificates in the specified vault currently available for recovery.
 
         The GetDeletedCertificates operation retrieves the certificates in the current vault which are
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_client.py
index 3f8e48a8e50c..f6cbad08a480 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_client.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_client.py
@@ -16,13 +16,13 @@
 from .._utils.serialization import Deserializer, Serializer
 from ._configuration import KeyVaultClientConfiguration
-from ._operations import KeyVaultClientOperationsMixin
+from ._operations._operations import _KeyVaultClientOperationsMixin
 
 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
 
 
-class KeyVaultClient(KeyVaultClientOperationsMixin):
+class KeyVaultClient(_KeyVaultClientOperationsMixin):
     """The key vault client performs cryptographic key operations and vault operations against the Key
     Vault service.
 
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/__init__.py
index d514f5e4b5be..933fcd7d1b55 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/__init__.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import
 
-from ._operations import KeyVaultClientOperationsMixin  # type: ignore
 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk
 
-__all__ = [
-    "KeyVaultClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/_operations.py
index 57b474b9fd13..47266b611fc7 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/_operations.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_generated/aio/_operations/_operations.py
@@ -9,7 +9,7 @@
 from collections.abc import MutableMapping
 from io import IOBase
 import json
-from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
 import urllib.parse
 
 from azure.core import AsyncPipelineClient
@@ -69,14 +69,14 @@
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
 
 
-class KeyVaultClientOperationsMixin(  # pylint: disable=too-many-public-methods
+class _KeyVaultClientOperationsMixin(  # pylint: disable=too-many-public-methods
     ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration]
 ):
 
     @distributed_trace
     def get_certificates(
         self, *, maxresults: Optional[int] = None, include_pending: Optional[bool] = None, **kwargs: Any
-    ) -> AsyncIterable["_models.CertificateItem"]:
+    ) -> AsyncItemPaged["_models.CertificateItem"]:
         """List certificates in a specified key vault.
 
         The GetCertificates operation returns the set of certificates resources in the specified key
@@ -502,7 +502,7 @@ async def delete_certificate_contacts(self, **kwargs: Any) -> _models.Contacts:
     @distributed_trace
     def get_certificate_issuers(
         self, *, maxresults: Optional[int] = None, **kwargs: Any
-    ) -> AsyncIterable["_models.CertificateIssuerItem"]:
+    ) -> AsyncItemPaged["_models.CertificateIssuerItem"]:
         """List certificate issuers for a specified key vault.
 
         The GetCertificateIssuers operation returns the set of certificate issuer resources in the
@@ -1372,7 +1372,7 @@ async def import_certificate(
     @distributed_trace
     def get_certificate_versions(
         self, certificate_name: str, *, maxresults: Optional[int] = None, **kwargs: Any
-    ) -> AsyncIterable["_models.CertificateItem"]:
+    ) -> AsyncItemPaged["_models.CertificateItem"]:
         """List the versions of a certificate.
 
         The GetCertificateVersions operation returns the versions of a certificate in the specified key
@@ -2602,7 +2602,7 @@ async def restore_certificate(
     @distributed_trace
     def get_deleted_certificates(
         self, *, maxresults: Optional[int] = None, include_pending: Optional[bool] = None, **kwargs: Any
-    ) -> AsyncIterable["_models.DeletedCertificateItem"]:
+    ) -> AsyncItemPaged["_models.DeletedCertificateItem"]:
         """Lists the deleted certificates in the specified vault currently available for recovery.
 
         The GetDeletedCertificates operation retrieves the certificates in the current vault which are
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_models.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_models.py
index febd6dc5d07a..692e0c5a1c1d 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_models.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_models.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
@@ -304,7 +305,7 @@ def version(self) -> Optional[str]:
     def preserve_order(self) -> Optional[bool]:
         """Whether the certificate order should be preserved.
 
-        :returns: Specifies whether the certificate chain preserves its original order. The default value is False, 
+        :returns: Specifies whether the certificate chain preserves its original order. The default value is False,
             which sets the leaf certificate at index 0.
         :rtype: bool or None
         """
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_shared/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_shared/async_challenge_auth_policy.py
index 0f84607e3ccd..3e3ac1855178 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_shared/async_challenge_auth_policy.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/_shared/async_challenge_auth_policy.py
@@ -66,7 +66,6 @@ async def await_result(func: Callable[P, Union[T, Awaitable[T]]], *args: P.args,
     return result
 
 
-
 class AsyncChallengeAuthPolicy(AsyncBearerTokenCredentialPolicy):
     """Policy for handling HTTP authentication challenges.
 
@@ -83,9 +82,7 @@ def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any)
         self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True)
         self._request_copy: Optional[HttpRequest] = None
 
-    async def send(
-        self, request: PipelineRequest[HttpRequest]
-    ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]:
+    async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]:
         """Authorize request with a bearer token and send it to the next policy.
 
         We implement this method to account for the valid scenario where a Key Vault authentication challenge is
@@ -156,7 +153,6 @@ async def handle_challenge_flow(
         await await_result(self.on_response, request, response)
         return response
 
-
     async def on_request(self, request: PipelineRequest) -> None:
         _enforce_tls(request)
         challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
@@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons
             if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"):
                 await self.authorize_request(request, scope, claims=challenge.claims)
             else:
-                await self.authorize_request(
-                    request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id
-                )
+                await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id)
 
         return True
diff --git a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/aio/_client.py b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/aio/_client.py
index bd6288a39369..8cc11b7f0250 100644
--- a/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/aio/_client.py
+++ b/sdk/keyvault/azure-keyvault-certificates/azure/keyvault/certificates/aio/_client.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
@@ -119,7 +120,7 @@ async def create_certificate(
             certificate_name=certificate_name,
             parameters=parameters,
             cls=lambda pipeline_response, deserialized, _: (pipeline_response, deserialized),
-            **kwargs
+            **kwargs,
         )
 
         create_certificate_operation = CertificateOperation._from_certificate_operation_bundle(cert_bundle)
@@ -133,8 +134,10 @@ async def create_certificate(
             get_certificate_command=get_certificate_command,
             interval=polling_interval,
         )
+
         def no_op(*_, **__) -> Any:  # The deserialization callback is ignored based on polling implementation
             pass
+
         return await AsyncLROPoller(command, create_certificate_operation, no_op, create_certificate_polling)
 
     @distributed_trace_async
@@ -160,17 +163,11 @@ async def get_certificate(self, certificate_name: str, **kwargs: Any) -> KeyVaul
             :caption: Get a certificate
             :dedent: 8
         """
-        bundle = await self._client.get_certificate(
-            certificate_name=certificate_name,
-            certificate_version="",
-            **kwargs
-        )
+        bundle = await self._client.get_certificate(certificate_name=certificate_name, certificate_version="", **kwargs)
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
     @distributed_trace_async
-    async def get_certificate_version(
-        self, certificate_name: str, version: str, **kwargs: Any
-    ) -> KeyVaultCertificate:
+    async def get_certificate_version(self, certificate_name: str, version: str, **kwargs: Any) -> KeyVaultCertificate:
         """Gets a specific version of a certificate without returning its management policy.
 
         Requires certificates/get permission. To get the latest version of the certificate, or to get the certificate's
@@ -194,9 +191,7 @@ async def get_certificate_version(
             :dedent: 8
         """
         bundle = await self._client.get_certificate(
-            certificate_name=certificate_name,
-            certificate_version=version,
-            **kwargs
+            certificate_name=certificate_name, certificate_version=version, **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
@@ -267,9 +262,7 @@ async def get_deleted_certificate(self, certificate_name: str, **kwargs: Any) ->
             :caption: Get a deleted certificate
             :dedent: 8
         """
-        bundle = await self._client.get_deleted_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = await self._client.get_deleted_certificate(certificate_name=certificate_name, **kwargs)
         return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)
 
     @distributed_trace_async
@@ -289,9 +282,7 @@ async def purge_deleted_certificate(self, certificate_name: str, **kwargs: Any)
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        await self._client.purge_deleted_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        await self._client.purge_deleted_certificate(certificate_name=certificate_name, **kwargs)
 
     @distributed_trace_async
     async def recover_deleted_certificate(self, certificate_name: str, **kwargs: Any) -> KeyVaultCertificate:
@@ -332,7 +323,7 @@ async def recover_deleted_certificate(self, certificate_name: str, **kwargs: Any
             command=command,
             final_resource=recovered_certificate,
             finished=False,
-            interval=polling_interval
+            interval=polling_interval,
         )
         await polling_method.run()
 
@@ -396,9 +387,7 @@ async def import_certificate(
         )
 
         bundle = await self._client.import_certificate(
-            certificate_name=certificate_name,
-            parameters=parameters,
-            **kwargs
+            certificate_name=certificate_name, parameters=parameters, **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
@@ -415,9 +404,7 @@ async def get_certificate_policy(self, certificate_name: str, **kwargs: Any) ->
 
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        bundle = await self._client.get_certificate_policy(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = await self._client.get_certificate_policy(certificate_name=certificate_name, **kwargs)
         return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
 
     @distributed_trace_async
@@ -438,9 +425,7 @@ async def update_certificate_policy(
         :raises ~azure.core.exceptions.HttpResponseError:
         """
         bundle = await self._client.update_certificate_policy(
-            certificate_name=certificate_name,
-            certificate_policy=policy._to_certificate_policy_bundle(),
-            **kwargs
+            certificate_name=certificate_name, certificate_policy=policy._to_certificate_policy_bundle(), **kwargs
         )
         return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
 
@@ -482,15 +467,10 @@ async def update_certificate_properties(
         else:
             attributes = None
 
-        parameters = self._models.CertificateUpdateParameters(
-            certificate_attributes=attributes, tags=tags
-        )
+        parameters = self._models.CertificateUpdateParameters(certificate_attributes=attributes, tags=tags)
 
         bundle = await self._client.update_certificate(
-            certificate_name=certificate_name,
-            certificate_version=version or "",
-            parameters=parameters,
-            **kwargs
+            certificate_name=certificate_name, certificate_version=version or "", parameters=parameters, **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
@@ -519,9 +499,7 @@ async def backup_certificate(self, certificate_name: str, **kwargs: Any) -> byte
             :caption: Get a certificate backup
             :dedent: 8
         """
-        backup_result = await self._client.backup_certificate(
-            certificate_name=certificate_name, **kwargs
-        )
+        backup_result = await self._client.backup_certificate(certificate_name=certificate_name, **kwargs)
         return backup_result.value
 
     @distributed_trace_async
@@ -548,8 +526,7 @@ async def restore_certificate_backup(self, backup: bytes, **kwargs: Any) -> KeyV
             :dedent: 8
         """
         bundle = await self._client.restore_certificate(
-            parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup),
-            **kwargs
+            parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup), **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
@@ -593,7 +570,7 @@ def list_deleted_certificates(
         return self._client.get_deleted_certificates(
             maxresults=max_page_size,
             cls=lambda objs: [DeletedCertificate._from_deleted_certificate_item(x) for x in objs],
-            **kwargs
+            **kwargs,
         )
 
     @distributed_trace
@@ -635,7 +612,7 @@ def list_properties_of_certificates(
         return self._client.get_certificates(
             maxresults=max_page_size,
             cls=lambda objs: [CertificateProperties._from_certificate_item(x) for x in objs],
-            **kwargs
+            **kwargs,
         )
 
     @distributed_trace
@@ -666,7 +643,7 @@ def list_properties_of_certificate_versions(
             certificate_name=certificate_name,
             maxresults=max_page_size,
             cls=lambda objs: [CertificateProperties._from_certificate_item(x) for x in objs],
-            **kwargs
+            **kwargs,
         )
 
     @distributed_trace_async
@@ -690,8 +667,7 @@ async def set_contacts(self, contacts: List[CertificateContact], **kwargs: Any)
             :dedent: 8
         """
         new_contacts = await self._client.set_certificate_contacts(
-            contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),
-            **kwargs
+            contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]), **kwargs
         )
         return [
             CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list
@@ -714,8 +690,7 @@ async def get_contacts(self, **kwargs: Any) -> List[CertificateContact]:
             :caption: Get contacts
             :dedent: 8
         """
-        contacts = await self._client.get_certificate_contacts( **kwargs
-        )
+        contacts = await self._client.get_certificate_contacts(**kwargs)
 
         return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]
 
     @distributed_trace_async
@@ -735,9 +710,7 @@ async def delete_contacts(self, **kwargs: Any) -> List[CertificateContact]:
             :caption: Delete contacts
             :dedent: 8
         """
-        contacts = await self._client.delete_certificate_contacts(
-            **kwargs
-        )
+        contacts = await self._client.delete_certificate_contacts(**kwargs)
 
         return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]
 
     @distributed_trace_async
@@ -753,9 +726,7 @@ async def get_certificate_operation(self, certificate_name: str, **kwargs: Any)
             the former if the certificate doesn't exist; the latter for other errors
         """
-        bundle = await self._client.get_certificate_operation(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = await self._client.get_certificate_operation(certificate_name=certificate_name, **kwargs)
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
     @distributed_trace_async
@@ -772,9 +743,7 @@ async def delete_certificate_operation(self, certificate_name: str, **kwargs: An
         :raises ~azure.core.exceptions.ResourceNotFoundError or ~azure.core.exceptions.HttpResponseError:
             the former if the operation doesn't exist; the latter for other errors
         """
-        bundle = await self._client.delete_certificate_operation(
-            certificate_name=certificate_name, **kwargs
-        )
+        bundle = await self._client.delete_certificate_operation(certificate_name=certificate_name, **kwargs)
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
     @distributed_trace_async
@@ -791,7 +760,7 @@ async def cancel_certificate_operation(self, certificate_name: str, **kwargs: An
         bundle = await self._client.update_certificate_operation(
             certificate_name=certificate_name,
             certificate_operation=self._models.CertificateOperationUpdateParameter(cancellation_requested=True),
-            **kwargs
+            **kwargs,
         )
         return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
 
@@ -836,9 +805,7 @@ async def merge_certificate(
         )
 
         bundle = await self._client.merge_certificate(
-            certificate_name=certificate_name,
-            parameters=parameters,
-            **kwargs
+            certificate_name=certificate_name, parameters=parameters, **kwargs
         )
         return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
 
@@ -862,9 +829,7 @@ async def get_issuer(self, issuer_name: str, **kwargs: Any) -> CertificateIssuer
             :caption: Get an issuer
             :dedent: 8
         """
-        issuer_bundle = await self._client.get_certificate_issuer(
-            issuer_name=issuer_name, **kwargs
-        )
+        issuer_bundle = await self._client.get_certificate_issuer(issuer_name=issuer_name, **kwargs)
         return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace_async
@@ -1034,9 +999,7 @@ async def delete_issuer(self, issuer_name: str, **kwargs: Any) -> CertificateIss
             :caption: Delete an issuer
             :dedent: 8
         """
-        issuer_bundle = await self._client.delete_certificate_issuer(
-            issuer_name=issuer_name, **kwargs
-        )
+        issuer_bundle = await self._client.delete_certificate_issuer(issuer_name=issuer_name, **kwargs)
         return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
 
     @distributed_trace
@@ -1060,9 +1023,7 @@ def list_properties_of_issuers(self, **kwargs: Any) -> AsyncItemPaged[IssuerProp
         """
         max_page_size = kwargs.pop("max_page_size", None)
         return self._client.get_certificate_issuers(
-            maxresults=max_page_size,
-            cls=lambda objs: [IssuerProperties._from_issuer_item(x) for x in objs],
-            **kwargs
+            maxresults=max_page_size, cls=lambda objs: [IssuerProperties._from_issuer_item(x) for x in objs], **kwargs
         )
 
     async def __aenter__(self) -> "CertificateClient":
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations.py
index c6e83e410383..5f9eff53f9ef 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations_async.py
index 8ed40e3b8210..e85cd9d5967c 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/backup_restore_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/contacts.py b/sdk/keyvault/azure-keyvault-certificates/samples/contacts.py
index 0c518e230ecf..2cb45aa52862 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/contacts.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/contacts.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/contacts_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/contacts_async.py
index 994b7db23004..fc05c0f3ebe4 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/contacts_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/contacts_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/hello_world.py b/sdk/keyvault/azure-keyvault-certificates/samples/hello_world.py
index b2df23b1a491..baeaabff1e42 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/hello_world.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/hello_world.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/hello_world_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/hello_world_async.py
index d712b51e5b82..826fbba557d3 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/hello_world_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/hello_world_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate.py b/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate.py
index 3819b769b131..a886ba8f099f 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py
index 150d071788d1..de8b3619d08b 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/issuers.py b/sdk/keyvault/azure-keyvault-certificates/samples/issuers.py
index 57936e4f917f..e7babf5e0658 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/issuers.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/issuers.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/issuers_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/issuers_async.py
index 9aed2e1af2fe..393c1ffbcaef 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/issuers_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/issuers_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/list_operations.py b/sdk/keyvault/azure-keyvault-certificates/samples/list_operations.py
index 8345036baedc..21ac303178e6 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/list_operations.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/list_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/list_operations_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/list_operations_async.py
index 54d21eb47342..21d92d58ed7b 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/list_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/list_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate.py b/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate.py
index 1834734056ba..14d4d9ced7c9 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate_async.py
index 3d8f2e69382a..a24a9dbf6f39 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/parse_certificate_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations.py b/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations.py
index ae17348dbdd7..6750e7695282 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations_async.py b/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations_async.py
index 9c8ceb14ccb4..edd06b98d307 100644
--- a/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations_async.py
+++ b/sdk/keyvault/azure-keyvault-certificates/samples/recover_purge_operations_async.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long,useless-suppression
 # ------------------------------------
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
diff --git a/sdk/keyvault/azure-keyvault-certificates/setup.py b/sdk/keyvault/azure-keyvault-certificates/setup.py
index b9ea86ee01d9..9305768e0a00 100644
--- a/sdk/keyvault/azure-keyvault-certificates/setup.py
+++ b/sdk/keyvault/azure-keyvault-certificates/setup.py
@@ -14,9 +14,10 @@
 
 PACKAGE_NAME = "azure-keyvault-certificates"
 PACKAGE_PPRINT_NAME = "Key Vault Certificates"
+PACKAGE_NAMESPACE = "azure.keyvault.certificates._generated"
 
-# a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace("-", "/")
+# a.b.c => a/b/c
+package_folder_path = PACKAGE_NAMESPACE.replace(".", "/")
 
 # Version extraction inspired from 'requests'
 with open(os.path.join(package_folder_path, "_version.py"), "r") as fd:
@@ -29,7 +30,6 @@
 setup(
     name=PACKAGE_NAME,
     version=version,
-    include_package_data=True,
     description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
     long_description=open("README.md", "r").read(),
     long_description_content_type="text/markdown",
@@ -39,7 +39,7 @@
     url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk",
     keywords="azure, azure sdk",
     classifiers=[
-        "Development Status :: 5 - Production/Stable",
+        "Development Status :: ",
         "Programming Language :: Python",
         "Programming Language :: Python :: 3 :: Only",
         "Programming Language :: Python :: 3",
@@ -47,22 +47,21 @@
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
         "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
         "License :: OSI Approved :: MIT License",
     ],
     zip_safe=False,
     packages=find_packages(
         exclude=[
-            "samples",
             "tests",
-            # Exclude packages that will be covered by PEP420 or nspkg
-            "azure",
-            "azure.keyvault",
         ]
     ),
+    include_package_data=True,
+    package_data={
+        "azure.keyvault.certificates._generated": ["py.typed"],
+    },
     install_requires=[
         "isodate>=0.6.1",
"azure-core>=1.31.0", + "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/keyvault/azure-keyvault-certificates/tests/certs.py b/sdk/keyvault/azure-keyvault-certificates/tests/certs.py index b369590befcc..c94546338d86 100644 --- a/sdk/keyvault/azure-keyvault-certificates/tests/certs.py +++ b/sdk/keyvault/azure-keyvault-certificates/tests/certs.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client.py b/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client.py index 74932f3cffa8..938fd5b7c676 100644 --- a/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client.py +++ b/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -782,9 +783,7 @@ def test_unknown_issuer_response(self, client, **kwargs): content_type=CertificateContentType.pkcs12, validity_in_months=24, ) - create_certificate_poller = client.begin_create_certificate( - certificate_name=cert_name, policy=cert_policy - ) + create_certificate_poller = client.begin_create_certificate(certificate_name=cert_name, policy=cert_policy) result = create_certificate_poller.result() # The operation should indicate that certificate creation is in progress and requires a merge to complete assert isinstance(result, CertificateOperation) diff --git a/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client_async.py b/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client_async.py index a0661214c939..ac8729c62943 100644 --- a/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client_async.py +++ b/sdk/keyvault/azure-keyvault-certificates/tests/test_certificates_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
diff --git a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in index d4688a08c24e..7696bd6b2f38 100644 --- a/sdk/keyvault/azure-keyvault-keys/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-keys/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/keys/py.typed +include azure/keyvault/keys/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/keys/__init__.py diff --git a/sdk/keyvault/azure-keyvault-keys/_metadata.json b/sdk/keyvault/azure-keyvault-keys/_metadata.json new file mode 100644 index 000000000000..06284fddac1b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-keys/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "7.6" +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-keys/apiview-properties.json b/sdk/keyvault/azure-keyvault-keys/apiview-properties.json new file mode 100644 index 000000000000..2fbacff777a3 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-keys/apiview-properties.json @@ -0,0 +1,92 @@ +{ + "CrossLanguagePackageId": "KeyVault", + "CrossLanguageDefinitionId": { + "azure.keyvault.keys._generated.models.BackupKeyResult": "KeyVault.BackupKeyResult", + "azure.keyvault.keys._generated.models.DeletedKeyBundle": "KeyVault.DeletedKeyBundle", + "azure.keyvault.keys._generated.models.DeletedKeyItem": "KeyVault.DeletedKeyItem", + "azure.keyvault.keys._generated.models.GetRandomBytesRequest": "KeyVault.GetRandomBytesRequest", + "azure.keyvault.keys._generated.models.JsonWebKey": "KeyVault.JsonWebKey", + "azure.keyvault.keys._generated.models.KeyAttestation": "KeyVault.KeyAttestation", + "azure.keyvault.keys._generated.models.KeyAttributes": "KeyVault.KeyAttributes", + "azure.keyvault.keys._generated.models.KeyBundle": "KeyVault.KeyBundle", + "azure.keyvault.keys._generated.models.KeyCreateParameters": "KeyVault.KeyCreateParameters", + "azure.keyvault.keys._generated.models.KeyImportParameters": "KeyVault.KeyImportParameters", + "azure.keyvault.keys._generated.models.KeyItem": "KeyVault.KeyItem", + "azure.keyvault.keys._generated.models.KeyOperationResult": "KeyVault.KeyOperationResult", + "azure.keyvault.keys._generated.models.KeyOperationsParameters": "KeyVault.KeyOperationsParameters", + "azure.keyvault.keys._generated.models.KeyReleaseParameters": "KeyVault.KeyReleaseParameters", + "azure.keyvault.keys._generated.models.KeyReleasePolicy": "KeyVault.KeyReleasePolicy", + "azure.keyvault.keys._generated.models.KeyReleaseResult": "KeyVault.KeyReleaseResult", + "azure.keyvault.keys._generated.models.KeyRestoreParameters": "KeyVault.KeyRestoreParameters", + "azure.keyvault.keys._generated.models.KeyRotationPolicy": "KeyVault.KeyRotationPolicy", + "azure.keyvault.keys._generated.models.KeyRotationPolicyAttributes": "KeyVault.KeyRotationPolicyAttributes", + "azure.keyvault.keys._generated.models.KeySignParameters": "KeyVault.KeySignParameters", + "azure.keyvault.keys._generated.models.KeyUpdateParameters": "KeyVault.KeyUpdateParameters", + "azure.keyvault.keys._generated.models.KeyVaultError": "KeyVaultError", + "azure.keyvault.keys._generated.models.KeyVaultErrorError": "KeyVaultError.error.anonymous", + "azure.keyvault.keys._generated.models.KeyVerifyParameters": "KeyVault.KeyVerifyParameters", + "azure.keyvault.keys._generated.models.KeyVerifyResult": "KeyVault.KeyVerifyResult", + "azure.keyvault.keys._generated.models.LifetimeActions": "KeyVault.LifetimeActions", + 
"azure.keyvault.keys._generated.models.LifetimeActionsTrigger": "KeyVault.LifetimeActionsTrigger", + "azure.keyvault.keys._generated.models.LifetimeActionsType": "KeyVault.LifetimeActionsType", + "azure.keyvault.keys._generated.models.RandomBytes": "KeyVault.RandomBytes", + "azure.keyvault.keys._generated.models.JsonWebKeyType": "KeyVault.JsonWebKeyType", + "azure.keyvault.keys._generated.models.JsonWebKeyCurveName": "KeyVault.JsonWebKeyCurveName", + "azure.keyvault.keys._generated.models.DeletionRecoveryLevel": "KeyVault.DeletionRecoveryLevel", + "azure.keyvault.keys._generated.models.JsonWebKeyOperation": "KeyVault.JsonWebKeyOperation", + "azure.keyvault.keys._generated.models.JsonWebKeyEncryptionAlgorithm": "KeyVault.JsonWebKeyEncryptionAlgorithm", + "azure.keyvault.keys._generated.models.JsonWebKeySignatureAlgorithm": "KeyVault.JsonWebKeySignatureAlgorithm", + "azure.keyvault.keys._generated.models.KeyEncryptionAlgorithm": "KeyVault.KeyEncryptionAlgorithm", + "azure.keyvault.keys._generated.models.KeyRotationPolicyAction": "KeyVault.KeyRotationPolicyAction", + "azure.keyvault.keys._generated.KeyVaultClient.create_key": "KeyVault.createKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.create_key": "KeyVault.createKey", + "azure.keyvault.keys._generated.KeyVaultClient.rotate_key": "KeyVault.rotateKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.rotate_key": "KeyVault.rotateKey", + "azure.keyvault.keys._generated.KeyVaultClient.import_key": "KeyVault.importKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.import_key": "KeyVault.importKey", + "azure.keyvault.keys._generated.KeyVaultClient.delete_key": "KeyVault.deleteKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.delete_key": "KeyVault.deleteKey", + "azure.keyvault.keys._generated.KeyVaultClient.update_key": "KeyVault.updateKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.update_key": "KeyVault.updateKey", + "azure.keyvault.keys._generated.KeyVaultClient.get_key": "KeyVault.getKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key": "KeyVault.getKey", + "azure.keyvault.keys._generated.KeyVaultClient.get_key_versions": "KeyVault.getKeyVersions", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_versions": "KeyVault.getKeyVersions", + "azure.keyvault.keys._generated.KeyVaultClient.get_keys": "KeyVault.getKeys", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_keys": "KeyVault.getKeys", + "azure.keyvault.keys._generated.KeyVaultClient.backup_key": "KeyVault.backupKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.backup_key": "KeyVault.backupKey", + "azure.keyvault.keys._generated.KeyVaultClient.restore_key": "KeyVault.restoreKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.restore_key": "KeyVault.restoreKey", + "azure.keyvault.keys._generated.KeyVaultClient.encrypt": "KeyVault.encrypt", + "azure.keyvault.keys._generated.aio.KeyVaultClient.encrypt": "KeyVault.encrypt", + "azure.keyvault.keys._generated.KeyVaultClient.decrypt": "KeyVault.decrypt", + "azure.keyvault.keys._generated.aio.KeyVaultClient.decrypt": "KeyVault.decrypt", + "azure.keyvault.keys._generated.KeyVaultClient.sign": "KeyVault.sign", + "azure.keyvault.keys._generated.aio.KeyVaultClient.sign": "KeyVault.sign", + "azure.keyvault.keys._generated.KeyVaultClient.verify": "KeyVault.verify", + "azure.keyvault.keys._generated.aio.KeyVaultClient.verify": "KeyVault.verify", + "azure.keyvault.keys._generated.KeyVaultClient.wrap_key": "KeyVault.wrapKey", + 
"azure.keyvault.keys._generated.aio.KeyVaultClient.wrap_key": "KeyVault.wrapKey", + "azure.keyvault.keys._generated.KeyVaultClient.unwrap_key": "KeyVault.unwrapKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.unwrap_key": "KeyVault.unwrapKey", + "azure.keyvault.keys._generated.KeyVaultClient.release": "KeyVault.release", + "azure.keyvault.keys._generated.aio.KeyVaultClient.release": "KeyVault.release", + "azure.keyvault.keys._generated.KeyVaultClient.get_deleted_keys": "KeyVault.getDeletedKeys", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_deleted_keys": "KeyVault.getDeletedKeys", + "azure.keyvault.keys._generated.KeyVaultClient.get_deleted_key": "KeyVault.getDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_deleted_key": "KeyVault.getDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.purge_deleted_key": "KeyVault.purgeDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.purge_deleted_key": "KeyVault.purgeDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.recover_deleted_key": "KeyVault.recoverDeletedKey", + "azure.keyvault.keys._generated.aio.KeyVaultClient.recover_deleted_key": "KeyVault.recoverDeletedKey", + "azure.keyvault.keys._generated.KeyVaultClient.get_key_rotation_policy": "KeyVault.getKeyRotationPolicy", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_rotation_policy": "KeyVault.getKeyRotationPolicy", + "azure.keyvault.keys._generated.KeyVaultClient.update_key_rotation_policy": "KeyVault.updateKeyRotationPolicy", + "azure.keyvault.keys._generated.aio.KeyVaultClient.update_key_rotation_policy": "KeyVault.updateKeyRotationPolicy", + "azure.keyvault.keys._generated.KeyVaultClient.get_random_bytes": "KeyVault.getRandomBytes", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_random_bytes": "KeyVault.getRandomBytes", + "azure.keyvault.keys._generated.KeyVaultClient.get_key_attestation": "KeyVault.getKeyAttestation", + "azure.keyvault.keys._generated.aio.KeyVaultClient.get_key_attestation": "KeyVault.getKeyAttestation" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-keys/azure/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py index 679ab6995134..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/__init__.py @@ -1,5 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/__init__.py index 3a06bca6b656..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/__init__.py @@ -1,43 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------- -from ._enums import KeyCurveName, KeyExportEncryptionAlgorithm, KeyOperation, KeyRotationPolicyAction, KeyType -from ._shared.client_base import ApiVersion -from ._models import ( - DeletedKey, - JsonWebKey, - KeyAttestation, - KeyProperties, - KeyReleasePolicy, - KeyRotationLifetimeAction, - KeyRotationPolicy, - KeyVaultKey, - KeyVaultKeyIdentifier, - ReleaseKeyResult, -) -from ._client import KeyClient - -__all__ = [ - "ApiVersion", - "KeyClient", - "JsonWebKey", - "KeyAttestation", - "KeyVaultKey", - "KeyVaultKeyIdentifier", - "KeyCurveName", - "KeyExportEncryptionAlgorithm", - "KeyOperation", - "KeyRotationPolicyAction", - "KeyType", - "DeletedKey", - "KeyProperties", - "KeyReleasePolicy", - "KeyRotationLifetimeAction", - "KeyRotationPolicy", - "ReleaseKeyResult", -] - -from ._version import VERSION - -__version__ = VERSION +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py index 6ab95a316bd7..d54c2457648e 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_client.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -78,12 +79,12 @@ def _get_attributes( return None def get_cryptography_client( - self, - key_name: str, - *, - key_version: Optional[str] = None, - **kwargs, # pylint: disable=unused-argument - ) -> CryptographyClient: + self, + key_name: str, + *, + key_version: Optional[str] = None, + **kwargs, # pylint: disable=unused-argument + ) -> CryptographyClient: """Gets a :class:`~azure.keyvault.keys.crypto.CryptographyClient` for the given key. :param str key_name: The name of the key used to perform cryptographic operations. @@ -398,7 +399,9 @@ def create_oct_key( ) @distributed_trace - def begin_delete_key(self, name: str, **kwargs: Any) -> LROPoller[DeletedKey]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type + def begin_delete_key( + self, name: str, **kwargs: Any + ) -> LROPoller[DeletedKey]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type """Delete all versions of a key and its cryptographic material. Requires keys/delete permission. When this method returns Key Vault has begun deleting the key. 
Deletion may @@ -519,7 +522,7 @@ def list_deleted_keys(self, **kwargs: Any) -> ItemPaged[DeletedKey]: return self._client.get_deleted_keys( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [DeletedKey._from_deleted_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -542,7 +545,7 @@ def list_properties_of_keys(self, **kwargs: Any) -> ItemPaged[KeyProperties]: return self._client.get_keys( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [KeyProperties._from_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -568,7 +571,7 @@ def list_properties_of_key_versions(self, name: str, **kwargs: Any) -> ItemPaged name, maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [KeyProperties._from_key_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -709,9 +712,7 @@ def update_key_properties( release_policy=policy, ) - bundle = self._client.update_key( - name, key_version=version or "", parameters=parameters, **kwargs - ) + bundle = self._client.update_key(name, key_version=version or "", parameters=parameters, **kwargs) return KeyVaultKey._from_key_bundle(bundle) @distributed_trace @@ -770,8 +771,7 @@ def restore_key_backup(self, backup: bytes, **kwargs: Any) -> KeyVaultKey: :dedent: 8 """ bundle = self._client.restore_key( - parameters=self._models.KeyRestoreParameters(key_bundle_backup=backup), - **kwargs + parameters=self._models.KeyRestoreParameters(key_bundle_backup=backup), **kwargs ) return KeyVaultKey._from_key_bundle(bundle) @@ -877,7 +877,7 @@ def release_key( nonce=nonce, enc=algorithm, ), - **kwargs + **kwargs, ) return ReleaseKeyResult(result.value) @@ -988,7 +988,7 @@ def update_key_rotation_policy( # pylint: disable=unused-argument @distributed_trace def get_key_attestation(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultKey: """Get a key and its attestation blob. - + This method is applicable to any key stored in Azure Key Vault Managed HSM. This operation requires the keys/get permission. diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py index affcf5d228d3..39918fe3e74a 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_client.py @@ -15,14 +15,14 @@ from azure.core.rest import HttpRequest, HttpResponse from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations._operations import _KeyVaultClientOperationsMixin from ._utils.serialization import Deserializer, Serializer if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. 
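The `_client.py` hunk above repoints the generated `KeyVaultClient` at a now-private `_KeyVaultClientOperationsMixin` imported from `._operations._operations` (the async client gets the same treatment further below). A toy sketch of the pattern, with hypothetical names: the mixin drops out of the package's public exports, but the concrete client keeps the same callable surface.

```python
class _OperationsMixin:
    """Private mixin: no longer exported, mirroring the __all__ = [] change below."""

    def get_key(self, key_name: str, key_version: str) -> str:
        # Placeholder body; the real mixin builds and sends the HTTP request.
        return f"{key_name}/{key_version}"


class KeyVaultClient(_OperationsMixin):
    """Public client: unchanged surface, now inheriting the private mixin."""


client = KeyVaultClient()
print(client.get_key("my-key", ""))  # callers are unaffected by the rename
```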
diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py index d514f5e4b5be..933fcd7d1b55 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py index 2acc3fc6df38..fffeacdf8bac 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload import urllib.parse from azure.core import PipelineClient @@ -691,7 +691,7 @@ def build_key_vault_get_key_attestation_request( # pylint: disable=name-too-lon return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -class KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods +class _KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration] ): @@ -1421,7 +1421,7 @@ def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _models.Key @distributed_trace def get_key_versions( self, key_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.KeyItem"]: + ) -> ItemPaged["_models.KeyItem"]: """Retrieves a list of individual key versions with the same key name. The full key identifier, attributes, and tags are provided in the response. This operation @@ -1514,7 +1514,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Iterable["_models.KeyItem"]: + def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> ItemPaged["_models.KeyItem"]: """List keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -3182,7 +3182,7 @@ def release( @distributed_trace def get_deleted_keys( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.DeletedKeyItem"]: + ) -> ItemPaged["_models.DeletedKeyItem"]: """Lists the deleted keys in the specified vault. 
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -3814,8 +3814,9 @@ def get_random_bytes( @distributed_trace @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "key_name", "key_version", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "key_name", "key_version", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> _models.KeyBundle: """Gets the public part of a stored key along with its attestation blob. diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py index 752b2822f9d3..f5af3a4eb8a2 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/_validation.py @@ -10,6 +10,22 @@ def api_version_validation(**kwargs): params_added_on = kwargs.pop("params_added_on", {}) method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. + :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default def decorator(func): @functools.wraps(func) @@ -21,7 +37,7 @@ def wrapper(*args, **kwargs): except AttributeError: return func(*args, **kwargs) - if method_added_on > client_api_version: + if _index_with_default(method_added_on) > _index_with_default(client_api_version): raise ValueError( f"'{func.__name__}' is not available in API version " f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." @@ -31,7 +47,7 @@ def wrapper(*args, **kwargs): parameter: api_version for api_version, parameters in params_added_on.items() for parameter in parameters - if parameter in kwargs and api_version > client_api_version + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) } if unsupported: raise ValueError( diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py index 3f8e48a8e50c..f6cbad08a480 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_client.py @@ -16,13 +16,13 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations._operations import _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. 
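The `_validation.py` hunk above replaces lexicographic comparison of API-version strings with position-based comparison against an explicit `api_versions_list`. A standalone sketch of why that matters (the list mirrors the one the hunks pass to the decorator):

```python
api_versions_list = ["7.6-preview.2", "7.6"]  # oldest first, as in the decorator

def _index_with_default(value: str, default: int = -1) -> int:
    """Position of value in api_versions_list, or default when unknown."""
    try:
        return api_versions_list.index(value)
    except ValueError:
        return default

# Plain string comparison misorders previews: "7.6-preview.2" sorts AFTER "7.6",
# so a client pinned to GA 7.6 would wrongly look too old for a method that was
# added in the earlier preview, and the guard would raise.
assert "7.6-preview.2" > "7.6"

# Index-based ordering restores the intended chronology: the preview precedes GA.
assert _index_with_default("7.6-preview.2") < _index_with_default("7.6")
# Unknown versions fall back to -1 and therefore never trip the guard.
assert _index_with_default("2099-01-01") == -1
```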
diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py index d514f5e4b5be..933fcd7d1b55 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py index 33877dd1143c..da64cfb99e82 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/aio/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload import urllib.parse from azure.core import AsyncPipelineClient @@ -68,7 +68,7 @@ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods +class _KeyVaultClientOperationsMixin( # pylint: disable=too-many-public-methods ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration] ): @@ -798,7 +798,7 @@ async def get_key(self, key_name: str, key_version: str, **kwargs: Any) -> _mode @distributed_trace def get_key_versions( self, key_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.KeyItem"]: + ) -> AsyncItemPaged["_models.KeyItem"]: """Retrieves a list of individual key versions with the same key name. The full key identifier, attributes, and tags are provided in the response. This operation @@ -891,7 +891,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncIterable["_models.KeyItem"]: + def get_keys(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncItemPaged["_models.KeyItem"]: """List keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -2559,7 +2559,7 @@ async def release( @distributed_trace def get_deleted_keys( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.DeletedKeyItem"]: + ) -> AsyncItemPaged["_models.DeletedKeyItem"]: """Lists the deleted keys in the specified vault. 
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the @@ -3192,8 +3192,9 @@ async def get_random_bytes( @distributed_trace_async @api_version_validation( - method_added_on="7.6", - params_added_on={"7.6": ["api_version", "key_name", "key_version", "accept"]}, + method_added_on="7.6-preview.2", + params_added_on={"7.6-preview.2": ["api_version", "key_name", "key_version", "accept"]}, + api_versions_list=["7.6-preview.2", "7.6"], ) async def get_key_attestation(self, key_name: str, key_version: str, **kwargs: Any) -> _models.KeyBundle: """Gets the public part of a stored key along with its attestation blob. diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py index b0c6b4138afd..c6f1cb236ad1 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -312,7 +313,9 @@ def attestation(self) -> Optional[KeyAttestation]: # attestation was added in 7.6-preview.2 if self._attributes: attestation = getattr(self._attributes, "attestation", None) - return KeyAttestation._from_generated(attestation=attestation) if attestation else None # pylint:disable=protected-access + return ( + KeyAttestation._from_generated(attestation=attestation) if attestation else None + ) # pylint:disable=protected-access return None @@ -411,7 +414,8 @@ def _from_generated(cls, policy: "_models.KeyRotationPolicy") -> "KeyRotationPol [] if policy.lifetime_actions is None else [ - KeyRotationLifetimeAction._from_generated(action) for action in policy.lifetime_actions # pylint:disable=protected-access + KeyRotationLifetimeAction._from_generated(action) + for action in policy.lifetime_actions # pylint:disable=protected-access ] ) if policy.attributes: diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py index 0f84607e3ccd..3e3ac1855178 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_shared/async_challenge_auth_policy.py @@ -66,7 +66,6 @@ async def await_result(func: Callable[P, Union[T, Awaitable[T]]], *args: P.args, return result - class AsyncChallengeAuthPolicy(AsyncBearerTokenCredentialPolicy): """Policy for handling HTTP authentication challenges. @@ -83,9 +82,7 @@ def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any) self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True) self._request_copy: Optional[HttpRequest] = None - async def send( - self, request: PipelineRequest[HttpRequest] - ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: + async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: """Authorize request with a bearer token and send it to the next policy. 
We implement this method to account for the valid scenario where a Key Vault authentication challenge is @@ -156,7 +153,6 @@ async def handle_challenge_flow( await await_result(self.on_response, request, response) return response - async def on_request(self, request: PipelineRequest) -> None: _enforce_tls(request) challenge = ChallengeCache.get_challenge_for_url(request.http_request.url) @@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"): await self.authorize_request(request, scope, claims=challenge.claims) else: - await self.authorize_request( - request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id - ) + await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id) return True diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py index 2f1c30a96b01..41084614a416 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/aio/_client.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -84,12 +85,12 @@ def _get_attributes( return None def get_cryptography_client( - self, - key_name: str, - *, - key_version: Optional[str] = None, - **kwargs, # pylint: disable=unused-argument - ) -> CryptographyClient: + self, + key_name: str, + *, + key_version: Optional[str] = None, + **kwargs, # pylint: disable=unused-argument + ) -> CryptographyClient: """Gets a :class:`~azure.keyvault.keys.crypto.aio.CryptographyClient` for the given key. :param str key_name: The name of the key used to perform cryptographic operations. @@ -647,7 +648,7 @@ async def recover_deleted_key(self, name: str, **kwargs: Any) -> KeyVaultKey: command=command, final_resource=recovered_key, finished=False, - interval=polling_interval + interval=polling_interval, ) await polling_method.run() @@ -841,9 +842,7 @@ async def import_key( release_policy=policy, ) - bundle = await self._client.import_key( - name, parameters=parameters, **kwargs - ) + bundle = await self._client.import_key(name, parameters=parameters, **kwargs) return KeyVaultKey._from_key_bundle(bundle) @distributed_trace_async @@ -996,7 +995,7 @@ async def update_key_rotation_policy( # pylint: disable=unused-argument @distributed_trace_async async def get_key_attestation(self, name: str, version: Optional[str] = None, **kwargs: Any) -> KeyVaultKey: """Get a key and its attestation blob. - + This method is applicable to any key stored in Azure Key Vault Managed HSM. This operation requires the keys/get permission. 
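The hunks above touch `get_key_attestation` on both the sync and async convenience clients; per the docstrings, it fetches a key along with its attestation blob from a Managed HSM. A hedged usage sketch for the sync variant (the vault URL and key name are placeholders, and the location of the `attestation` property is inferred from the `_models.py` hunk above, not confirmed by this patch):

```python
from azure.identity import DefaultAzureCredential
from azure.keyvault.keys import KeyClient

# Placeholder endpoint: attestation applies to keys stored in Managed HSM.
client = KeyClient(
    vault_url="https://<managed-hsm-name>.managedhsm.azure.net",
    credential=DefaultAzureCredential(),
)

key = client.get_key_attestation("my-attested-key")  # requires keys/get permission
print(key.name, key.properties.attestation is not None)
```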
diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py index d3a27fee66df..51f273d8f858 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_client.py @@ -32,13 +32,13 @@ def _validate_arguments( - operation: KeyOperation, - algorithm: EncryptionAlgorithm, - *, - iv: Optional[bytes] = None, - tag: Optional[bytes] = None, - aad: Optional[bytes] = None, - ) -> None: + operation: KeyOperation, + algorithm: EncryptionAlgorithm, + *, + iv: Optional[bytes] = None, + tag: Optional[bytes] = None, + aad: Optional[bytes] = None, +) -> None: """Validates the arguments passed to perform an operation with a provided algorithm. :param KeyOperation operation: the type of operation being requested @@ -55,9 +55,7 @@ def _validate_arguments( """ if operation == KeyOperation.encrypt: if iv and "CBC" not in algorithm: - raise ValueError( - f"iv should only be provided with AES-CBC algorithms; {algorithm} does not accept an iv" - ) + raise ValueError(f"iv should only be provided with AES-CBC algorithms; {algorithm} does not accept an iv") if iv is None and "CBC" in algorithm: raise ValueError("iv is a required parameter for encryption with AES-CBC algorithms.") if aad and not ("CBC" in algorithm or "GCM" in algorithm): @@ -68,9 +66,7 @@ def _validate_arguments( if operation == KeyOperation.decrypt: if iv and not ("CBC" in algorithm or "GCM" in algorithm): - raise ValueError( - f"iv should only be provided with AES algorithms; {algorithm} does not accept an iv" - ) + raise ValueError(f"iv should only be provided with AES algorithms; {algorithm} does not accept an iv") if iv is None and ("CBC" in algorithm or "GCM" in algorithm): raise ValueError("iv is a required parameter for decryption with AES algorithms.") if tag and "GCM" not in algorithm: @@ -203,7 +199,7 @@ def _initialize(self, **kwargs: Any) -> None: key_bundle = self._client.get_key( self._key_id.name if self._key_id else None, self._key_id.version if self._key_id else None, - **kwargs + **kwargs, ) key = KeyVaultKey._from_key_bundle(key_bundle) self._key = key.key @@ -310,7 +306,7 @@ def encrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=plaintext, iv=iv, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) result_iv = operation_result.iv if hasattr(operation_result, "iv") else None @@ -400,7 +396,7 @@ def decrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=ciphertext, iv=iv, tag=authentication_tag, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) return DecryptResult(key_id=self.key_id, algorithm=algorithm, plaintext=operation_result.result) @@ -443,7 +439,7 @@ def wrap_key(self, algorithm: KeyWrapAlgorithm, key: bytes, **kwargs: Any) -> Wr key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=key), - **kwargs + **kwargs, ) return WrapResult(key_id=self.key_id, algorithm=algorithm, encrypted_key=operation_result.result) @@ -485,7 +481,7 @@ def unwrap_key(self, algorithm: KeyWrapAlgorithm, encrypted_key: bytes, **kwargs key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=encrypted_key), - **kwargs + 
**kwargs, ) return UnwrapResult(key_id=self.key_id, algorithm=algorithm, key=operation_result.result) @@ -527,7 +523,7 @@ def sign(self, algorithm: SignatureAlgorithm, digest: bytes, **kwargs: Any) -> S key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeySignParameters(algorithm=algorithm, value=digest), - **kwargs + **kwargs, ) return SignResult(key_id=self.key_id, algorithm=algorithm, signature=operation_result.result) @@ -571,7 +567,7 @@ def verify(self, algorithm: SignatureAlgorithm, digest: bytes, signature: bytes, key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyVerifyParameters(algorithm=algorithm, digest=digest, signature=signature), - **kwargs + **kwargs, ) return VerifyResult(key_id=self.key_id, algorithm=algorithm, is_valid=operation_result.value) diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py index fcce8d0929e7..356b72b5edc5 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_enums.py @@ -5,6 +5,7 @@ from enum import Enum from azure.core import CaseInsensitiveEnumMeta + # pylint: disable=enum-must-be-uppercase class KeyWrapAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Key wrapping algorithms""" diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py index e1325894bccc..0d3f21df4257 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/rsa_key.py @@ -33,7 +33,14 @@ class RsaKey(Key): # pylint:disable=too-many-public-methods _supported_encryption_algorithms = frozenset((Rsa1_5.name(), RsaOaep.name(), RsaOaep256.name())) _supported_key_wrap_algorithms = frozenset((Rsa1_5.name(), RsaOaep.name(), RsaOaep256.name())) _supported_signature_algorithms = frozenset( - (Ps256.name(), Ps384.name(), Ps512.name(), Rs256.name(), Rs384.name(), Rs512.name(),) + ( + Ps256.name(), + Ps384.name(), + Ps512.name(), + Rs256.name(), + Rs384.name(), + Rs512.name(), + ) ) def __init__(self, kid=None): diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py index c16d5811f7e9..2f1f6788a6ed 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
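The crypto `_client.py` hunks above mostly reflow `_validate_arguments`, whose encrypt-side rules are easy to restate. A standalone sketch with a simplified signature (the third error message is paraphrased, since the hunk truncates it):

```python
from typing import Optional

def validate_encrypt_args(algorithm: str, iv: Optional[bytes] = None, aad: Optional[bytes] = None) -> None:
    """Encrypt-side rules mirrored from _validate_arguments above."""
    if iv and "CBC" not in algorithm:
        raise ValueError(f"iv should only be provided with AES-CBC algorithms; {algorithm} does not accept an iv")
    if iv is None and "CBC" in algorithm:
        raise ValueError("iv is a required parameter for encryption with AES-CBC algorithms.")
    if aad and not ("CBC" in algorithm or "GCM" in algorithm):
        raise ValueError("aad is only supported with AES-CBC or AES-GCM algorithms.")  # paraphrased message

validate_encrypt_args("A256CBCPAD", iv=b"\x00" * 16)   # accepted: CBC algorithm with an iv
try:
    validate_encrypt_args("RSA-OAEP", iv=b"\x00" * 16)  # rejected: iv without a CBC algorithm
except ValueError as exc:
    print(exc)
```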
diff --git a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py index 9f7e370e983f..13111932ebbe 100644 --- a/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/aio/_client.py @@ -146,7 +146,7 @@ async def _initialize(self, **kwargs: Any) -> None: key_bundle = await self._client.get_key( self._key_id.name if self._key_id else None, self._key_id.version if self._key_id else None, - **kwargs + **kwargs, ) key = KeyVaultKey._from_key_bundle(key_bundle) self._key = key.key @@ -229,7 +229,7 @@ async def encrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=plaintext, iv=iv, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) result_iv = operation_result.iv if hasattr(operation_result, "iv") else None @@ -319,7 +319,7 @@ async def decrypt( parameters=self._models.KeyOperationsParameters( algorithm=algorithm, value=ciphertext, iv=iv, tag=authentication_tag, aad=additional_authenticated_data ), - **kwargs + **kwargs, ) return DecryptResult(key_id=self.key_id, algorithm=algorithm, plaintext=operation_result.result) @@ -362,7 +362,7 @@ async def wrap_key(self, algorithm: KeyWrapAlgorithm, key: bytes, **kwargs: Any) key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=key), - **kwargs + **kwargs, ) return WrapResult(key_id=self.key_id, algorithm=algorithm, encrypted_key=operation_result.result) @@ -404,7 +404,7 @@ async def unwrap_key(self, algorithm: KeyWrapAlgorithm, encrypted_key: bytes, ** key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyOperationsParameters(algorithm=algorithm, value=encrypted_key), - **kwargs + **kwargs, ) return UnwrapResult(key_id=self.key_id, algorithm=algorithm, key=operation_result.result) @@ -447,7 +447,7 @@ async def sign(self, algorithm: SignatureAlgorithm, digest: bytes, **kwargs: Any key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeySignParameters(algorithm=algorithm, value=digest), - **kwargs + **kwargs, ) return SignResult(key_id=self.key_id, algorithm=algorithm, signature=operation_result.result) @@ -493,7 +493,7 @@ async def verify( key_name=self._key_id.name if self._key_id else None, key_version=self._key_id.version if self._key_id else None, parameters=self._models.KeyVerifyParameters(algorithm=algorithm, digest=digest, signature=signature), - **kwargs + **kwargs, ) return VerifyResult(key_id=self.key_id, algorithm=algorithm, is_valid=operation_result.value) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py index bdf7d1305fd3..2e3f83c2ff66 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. 
Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py index f6990ab87997..efef9cd89af4 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/backup_restore_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -34,13 +35,14 @@ # 5. Restore a key (restore_key_backup) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. VAULT_URL = os.environ["VAULT_URL"] credential = DefaultAzureCredential() client = KeyClient(vault_url=VAULT_URL, credential=credential) - + # Let's create a Key of type RSA. # if the key already exists in the Key Vault, then a new version of the key is created. print("\n.. Create Key") diff --git a/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py b/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py index afb096383708..e323e555334f 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/hello_world.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -12,7 +13,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -70,9 +71,7 @@ # associated with a key previously stored within Key Vault. print("\n.. Update a Key by name") expires = datetime.datetime.utcnow() + datetime.timedelta(days=365) -updated_ec_key = client.update_key_properties( - ec_key.name, ec_key.properties.version, expires_on=expires, enabled=False -) +updated_ec_key = client.update_key_properties(ec_key.name, ec_key.properties.version, expires_on=expires, enabled=False) print(f"Key with name '{updated_ec_key.name}' was updated on date '{updated_ec_key.properties.updated_on}'") print(f"Key with name '{updated_ec_key.name}' was updated to expire on '{updated_ec_key.properties.expires_on}'") diff --git a/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py b/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py index 864f14750aaa..87d3728d5253 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/hello_world_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -35,6 +36,7 @@ # 5. Delete a key (delete_key) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. diff --git a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py index 248ac051899e..731f4d743805 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py index c20f614943a7..4ebad1c8cf59 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/key_rotation_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -36,6 +37,7 @@ # 5. Delete a key (delete_key) # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. @@ -108,4 +110,4 @@ async def run_sample(): if __name__ == "__main__": - asyncio.run(run_sample()) \ No newline at end of file + asyncio.run(run_sample()) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py index f8b01807efc4..cf42f7006bb2 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/list_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. 
For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py index 757017e45e00..e1a2c535508e 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/list_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -35,6 +36,7 @@ # # ---------------------------------------------------------------------------------------------------------- + async def run_sample(): # Instantiate a key client that will be used to call the service. # Here we use the DefaultAzureCredential, but any azure-identity credential can be used. diff --git a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py index 9b5b45985c97..7668cf456ac8 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -13,7 +14,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # diff --git a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py index bcb8eb588df3..8d4f1c138c36 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/recover_purge_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -7,6 +8,7 @@ from azure.identity.aio import DefaultAzureCredential from azure.keyvault.keys.aio import KeyClient + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) diff --git a/sdk/keyvault/azure-keyvault-keys/samples/send_request.py b/sdk/keyvault/azure-keyvault-keys/samples/send_request.py index 78d35dc3c8f2..c5e3f2b5a9c3 100644 --- a/sdk/keyvault/azure-keyvault-keys/samples/send_request.py +++ b/sdk/keyvault/azure-keyvault-keys/samples/send_request.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -14,7 +15,7 @@ # 2. azure-keyvault-keys and azure-identity libraries (pip install these) # # 3. Set environment variable VAULT_URL with the URL of your key vault -# +# # 4. Set up your environment to use azure-identity's DefaultAzureCredential. 
For more information about how to configure # the DefaultAzureCredential, refer to https://aka.ms/azsdk/python/identity/docs#azure.identity.DefaultAzureCredential # @@ -55,7 +56,7 @@ response = client.send_request(request) # The return value is an azure.core.rest.HttpResponse -- the key information is in the response body. -# We can get a dictionary of the body content with the `json` method. +# We can get a dictionary of the body content with the `json` method. response_body = response.json() print(f"\n.. Key with ID {response_body['key']['kid']} was found.") diff --git a/sdk/keyvault/azure-keyvault-keys/setup.py b/sdk/keyvault/azure-keyvault-keys/setup.py new file mode 100644 index 000000000000..fa2ff73b2e2b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-keys/setup.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + + +import os +import re +from setuptools import setup, find_packages + + +PACKAGE_NAME = "azure-keyvault-keys" +PACKAGE_PPRINT_NAME = "Key Vault Keys" +PACKAGE_NAMESPACE = "azure.keyvault.keys._generated" + +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError("Cannot find version information") + + +setup( + name=PACKAGE_NAME, + version=version, + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", + keywords="azure, azure sdk", + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", + ], + zip_safe=False, + packages=find_packages( + exclude=[ + "tests", + ] + ), + include_package_data=True, + package_data={ + "azure.keyvault.keys._generated": ["py.typed"], + }, + install_requires=[ + "isodate>=0.6.1", + "azure-core>=1.30.0", + "typing-extensions>=4.6.0", + ], + python_requires=">=3.9", +) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py b/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py index d1e99ba811e3..d23ab5e15853 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_keys_test_case.py @@ -13,7 +13,7 @@ def _get_attestation_uri(self): playback_uri = "https://fakeattestation.azurewebsites.net" if self.is_live: real_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL") -        real_uri = real_uri.rstrip('/')
if real_uri is None: pytest.skip("No AZURE_KEYVAULT_ATTESTATION_URL environment variable") +        real_uri = real_uri.rstrip("/") return real_uri @@ -22,9 +22,11 @@ def _get_attestation_uri(self): def create_crypto_client(self, key, **kwargs): if kwargs.pop("is_async", False): from azure.keyvault.keys.crypto.aio import CryptographyClient - credential = self.get_credential(CryptographyClient,is_async=True) + + credential = self.get_credential(CryptographyClient, is_async=True) else: from azure.keyvault.keys.crypto import CryptographyClient + credential = self.get_credential(CryptographyClient) return self.create_client_from_credential(CryptographyClient, credential=credential, key=key, **kwargs) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py index a67376fd53e1..87a1198ea5de 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case.py @@ -8,8 +8,6 @@ from devtools_testutils import AzureRecordedTestCase - - class KeyVaultTestCase(AzureRecordedTestCase): def get_resource_name(self, name): """helper to create resources with a consistent, test-indicative prefix""" diff --git a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py index 6059c528f1a3..fb26f89f3ab3 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/_shared/test_case_async.py @@ -36,7 +36,7 @@ async def _poll_until_exception(self, fn, expected_exception, max_retries=20, re except expected_exception: return self.fail(f"expected exception {expected_exception} was not raised") - + def teardown_method(self, method): HttpChallengeCache.clear() assert len(HttpChallengeCache._cache) == 0 diff --git a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py index bfcd18356825..3db4bb782e44 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/conftest.py @@ -13,16 +13,22 @@ add_oauth_response_sanitizer, is_live, remove_batch_sanitizers, - set_custom_default_matcher + set_custom_default_matcher, ) from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION, ApiVersion -os.environ['PYTHONHASHSEED'] = '0' +os.environ["PYTHONHASHSEED"] = "0" ALL_API_VERSIONS = "--all-api-versions" + def pytest_addoption(parser): - parser.addoption(ALL_API_VERSIONS, action="store_true", default=False, - help="Test all api version in live mode. Not applicable in playback mode.") + parser.addoption( + ALL_API_VERSIONS, + action="store_true", + default=False, + help="Test all API versions in live mode.
Not applicable in playback mode.", + ) + def pytest_configure(config): if is_live() and not config.getoption(ALL_API_VERSIONS): @@ -30,16 +36,19 @@ def pytest_configure(config): else: pytest.api_version = ApiVersion + @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): azure_keyvault_url = os.getenv("AZURE_KEYVAULT_URL", "https://vaultname.vault.azure.net") azure_keyvault_url = azure_keyvault_url.rstrip("/") keyvault_tenant_id = os.getenv("KEYVAULT_TENANT_ID", "keyvault_tenant_id") keyvault_subscription_id = os.getenv("KEYVAULT_SUBSCRIPTION_ID", "keyvault_subscription_id") - azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL","https://managedhsmvaultname.managedhsm.azure.net") + azure_managedhsm_url = os.environ.get("AZURE_MANAGEDHSM_URL", "https://managedhsmvaultname.managedhsm.azure.net") azure_managedhsm_url = azure_managedhsm_url.rstrip("/") - azure_attestation_uri = os.environ.get("AZURE_KEYVAULT_ATTESTATION_URL","https://fakeattestation.azurewebsites.net") - azure_attestation_uri = azure_attestation_uri.rstrip('/') + azure_attestation_uri = os.environ.get( + "AZURE_KEYVAULT_ATTESTATION_URL", "https://fakeattestation.azurewebsites.net" + ) + azure_attestation_uri = azure_attestation_uri.rstrip("/") add_general_string_sanitizer(target=azure_keyvault_url, value="https://vaultname.vault.azure.net") add_general_string_sanitizer(target=keyvault_tenant_id, value="00000000-0000-0000-0000-000000000000") @@ -52,7 +61,12 @@ def add_sanitizers(test_proxy): # Remove the following sanitizers since certain fields are needed in tests and are non-sensitive: # - AZSDK3430: $..id # - AZSDK3447: $.key - remove_batch_sanitizers(["AZSDK3430", "AZSDK3447",]) + remove_batch_sanitizers( + [ + "AZSDK3430", + "AZSDK3447", + ] + ) @pytest.fixture(scope="session", autouse=True) @@ -80,6 +94,7 @@ def immediate_return(_): else: yield + @pytest.fixture(scope="session") def event_loop(request): loop = asyncio.get_event_loop() diff --git a/sdk/keyvault/azure-keyvault-keys/tests/keys.py b/sdk/keyvault/azure-keyvault-keys/tests/keys.py index 80db438ae5f2..428b82eb21e0 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/keys.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/keys.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
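The conftest.py hunks above reformat an opt-in switch: live runs exercise only the default service API version unless --all-api-versions is passed, while playback runs parametrize across every recorded version. A minimal self-contained sketch of that pattern follows; the is_live() stub and the version values are illustrative stand-ins for the devtools_testutils and client_base imports the real file uses.

# Sketch of the option-gated API-version parametrization in conftest.py.
import pytest

ALL_API_VERSIONS = "--all-api-versions"

def pytest_addoption(parser):
    # Opt-in flag: live runs cover every API version only when explicitly requested.
    parser.addoption(
        ALL_API_VERSIONS,
        action="store_true",
        default=False,
        help="Test all API versions in live mode. Not applicable in playback mode.",
    )

def pytest_configure(config):
    def is_live():  # stand-in for devtools_testutils.is_live
        return False

    DEFAULT_VERSION = "7.5"  # stand-in for client_base.DEFAULT_VERSION
    ALL_VERSIONS = ("7.3", "7.4", "7.5")  # stand-in for the ApiVersion enum

    if is_live() and not config.getoption(ALL_API_VERSIONS):
        pytest.api_version = (DEFAULT_VERSION,)  # keep live runs fast by default
    else:
        pytest.api_version = ALL_VERSIONS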
diff --git a/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py b/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py index e98cbd1ce11c..469de42d8a35 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/perfstress_tests/sign.py @@ -25,6 +25,7 @@ def __init__(self, arguments): super().__init__(arguments) from dotenv import load_dotenv + load_dotenv() # Auth configuration diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py index de26cc59f07d..b889b052484f 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth.py @@ -36,6 +36,7 @@ TOKEN_TYPES = [AccessToken, AccessTokenInfo] + class TestChallengeAuth(KeyVaultTestCase, KeysTestCase): @pytest.mark.parametrize("api_version,is_hsm", only_default_version) @KeysClientPreparer() @@ -125,7 +126,6 @@ def test_enforces_tls(): pipeline.run(HttpRequest("GET", url)) - def test_challenge_cache(): url_a = get_random_url() challenge_a = HttpChallenge(url_a, "Bearer authorization=authority A, resource=resource A") @@ -148,9 +148,7 @@ def test_challenge_parsing(): tenant = "tenant" authority = f"https://login.authority.net/{tenant}" resource = "https://challenge.resource" - challenge = HttpChallenge( - "https://request.uri", challenge=f"Bearer authorization={authority}, resource={resource}" - ) + challenge = HttpChallenge("https://request.uri", challenge=f"Bearer authorization={authority}, resource={resource}") assert challenge.get_authorization_server() == authority assert challenge.get_resource() == resource @@ -548,8 +546,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) transport_2 = validating_transport( requests=[Request(), Request(required_headers={"Authorization": f"Bearer {token}"})], @@ -557,8 +555,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) @@ -603,8 +601,8 @@ def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py index 81ec711f6ad2..4834e9037d3a 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_challenge_auth_async.py @@ -19,7 +19,7 @@ from azure.core.pipeline import AsyncPipeline from azure.core.pipeline.policies import 
SansIOHTTPPolicy from azure.core.rest import HttpRequest -from azure.keyvault.keys._shared import AsyncChallengeAuthPolicy,HttpChallenge, HttpChallengeCache +from azure.keyvault.keys._shared import AsyncChallengeAuthPolicy, HttpChallenge, HttpChallengeCache from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION from azure.keyvault.keys.aio import KeyClient from devtools_testutils.aio import recorded_by_proxy_async @@ -45,7 +45,7 @@ class TestChallengeAuth(KeyVaultTestCase): @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_default_version) + @pytest.mark.parametrize("api_version,is_hsm", only_default_version) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_multitenant_authentication(self, client, is_hsm, **kwargs): @@ -131,9 +131,7 @@ async def get_token(*scopes, **_): credential = Mock(spec_set=["get_token"], get_token=Mock(wraps=get_token)) else: credential = Mock(spec_set=["get_token_info"], get_token_info=Mock(wraps=get_token)) - pipeline = AsyncPipeline( - policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send) - ) + pipeline = AsyncPipeline(policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send)) request = HttpRequest("POST", get_random_url()) request.set_bytes_body(expected_content) await pipeline.run(request) @@ -200,9 +198,7 @@ async def get_token(*_, options=None, **kwargs): credential = Mock(spec_set=["get_token"], get_token=Mock(wraps=get_token)) else: credential = Mock(spec_set=["get_token_info"], get_token_info=Mock(wraps=get_token)) - pipeline = AsyncPipeline( - policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send) - ) + pipeline = AsyncPipeline(policies=[AsyncChallengeAuthPolicy(credential=credential)], transport=Mock(send=send)) request = HttpRequest("POST", get_random_url()) request.set_bytes_body(expected_content) await pipeline.run(request) @@ -495,8 +491,8 @@ async def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) transport_2 = async_validating_transport( requests=[Request(), Request(required_headers={"Authorization": f"Bearer {token}"})], @@ -504,8 +500,8 @@ async def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) @@ -550,8 +546,8 @@ async def get_token(*_, **__): mock_response( status_code=401, headers={"WWW-Authenticate": f'Bearer authorization="{url}", resource={resource}'} ), - mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}) - ] + mock_response(status_code=200, json_payload={"key": {"kid": f"{url}/key-name"}}), + ], ) client = KeyClient(url, credential, transport=transport, verify_challenge_resource=verify_challenge_resource) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py index 45cb23d5b84b..382bee35dfb7 100644 --- 
a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -20,7 +21,7 @@ rsa_crt_dmq1, rsa_crt_iqmp, RSAPrivateNumbers, - RSAPublicNumbers + RSAPublicNumbers, ) from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat, PublicFormat import pytest @@ -60,21 +61,21 @@ def _to_bytes(hex): # RSA key with private components so that the JWK can be used for private operations TEST_JWK = { - "kty":"RSA", - "key_ops":["decrypt", "verify", "unwrapKey"], - "n":_to_bytes( + "kty": "RSA", + "key_ops": ["decrypt", "verify", "unwrapKey"], + "n": _to_bytes( "00a0914d00234ac683b21b4c15d5bed887bdc959c2e57af54ae734e8f00720d775d275e455207e3784ceeb60a50a4655dd72a7a94d271e8ee8f7959a669ca6e775bf0e23badae991b4529d978528b4bd90521d32dd2656796ba82b6bbfc7668c8f5eeb5053747fd199319d29a8440d08f4412d527ff9311eda71825920b47b1c46b11ab3e91d7316407e89c7f340f7b85a34042ce51743b27d4718403d34c7b438af6181be05e4d11eb985d38253d7fe9bf53fc2f1b002d22d2d793fa79a504b6ab42d0492804d7071d727a06cf3a8893aa542b1503f832b296371b6707d4dc6e372f8fe67d8ded1c908fde45ce03bc086a71487fa75e43aa0e0679aa0d20efe35" ), - "e":_to_bytes("10001"), - "p":_to_bytes( + "e": _to_bytes("10001"), + "p": _to_bytes( "00d1deac8d68ddd2c1fd52d5999655b2cf1565260de5269e43fd2a85f39280e1708ffff0682166cb6106ee5ea5e9ffd9f98d0becc9ff2cda2febc97259215ad84b9051e563e14a051dce438bc6541a24ac4f014cf9732d36ebfc1e61a00d82cbe412090f7793cfbd4b7605be133dfc3991f7e1bed5786f337de5036fc1e2df4cf3" ), - "q":_to_bytes( + "q": _to_bytes( "00c3dc66b641a9b73cd833bc439cd34fc6574465ab5b7e8a92d32595a224d56d911e74624225b48c15a670282a51c40d1dad4bc2e9a3c8dab0c76f10052dfb053bc6ed42c65288a8e8bace7a8881184323f94d7db17ea6dfba651218f931a93b8f738f3d8fd3f6ba218d35b96861a0f584b0ab88ddcf446b9815f4d287d83a3237" ), - "d":_to_bytes( + "d": _to_bytes( "627c7d24668148fe2252c7fa649ea8a5a9ed44d75c766cda42b29b660e99404f0e862d4561a6c95af6a83d213e0a2244b03cd28576473215073785fb067f015da19084ade9f475e08b040a9a2c7ba00253bb8125508c9df140b75161d266be347a5e0f6900fe1d8bbf78ccc25eeb37e0c9d188d6e1fc15169ba4fe12276193d77790d2326928bd60d0d01d6ead8d6ac4861abadceec95358fd6689c50a1671a4a936d2376440a41445501da4e74bfb98f823bd19c45b94eb01d98fc0d2f284507f018ebd929b8180dbe6381fdd434bffb7800aaabdd973d55f9eaf9bb88a6ea7b28c2a80231e72de1ad244826d665582c2362761019de2e9f10cb8bcc2625649" - ) + ), } @@ -111,9 +112,10 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - - assert key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes." - + + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes."
def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -122,7 +124,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _import_test_key(self, client, name, hardware_protected=False): key = JsonWebKey( @@ -169,7 +173,7 @@ def _import_symmetric_test_key(self, client, name): assert key_vault_key.key.kid == imported_key.id == key_vault_key.id return key_vault_key - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_key_id(self, key_client, is_hsm, **kwargs): @@ -185,7 +189,7 @@ def test_ec_key_id(self, key_client, is_hsm, **kwargs): crypto_client.verify(SignatureAlgorithm.es256_k, hashlib.sha256(self.plaintext).digest(), self.plaintext) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_key_id(self, key_client, is_hsm, **kwargs): @@ -399,7 +403,7 @@ def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): encrypt_result.ciphertext, iv=encrypt_result.iv, authentication_tag=encrypt_result.tag, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) else: encrypt_result = crypto_client.encrypt( @@ -410,13 +414,15 @@ def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=None if "CBC" in algorithm else self.aad + additional_authenticated_data=None if "CBC" in algorithm else self.aad, ) assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm if algorithm.endswith("CBC"): - assert decrypt_result.plaintext.startswith(self.plaintext) # AES-CBC returns a zero-padded plaintext + assert decrypt_result.plaintext.startswith( + self.plaintext + ) # AES-CBC returns a zero-padded plaintext else: assert decrypt_result.plaintext == self.plaintext @@ -436,7 +442,7 @@ def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_encrypt_local(self, key_client, is_hsm, **kwargs): @@ -453,7 +459,7 @@ def test_encrypt_local(self, key_client, is_hsm, **kwargs): result = crypto_client.decrypt(result.algorithm, result.ciphertext) assert result.plaintext == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -470,8 +476,8 @@ def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): result
= crypto_client.decrypt(result.algorithm, result.ciphertext) assert result.plaintext == self.plaintext - - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_symmetric_encrypt_local(self, key_client, **kwargs): @@ -499,7 +505,7 @@ def test_symmetric_encrypt_local(self, key_client, **kwargs): assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm assert decrypt_result.plaintext == self.plaintext - + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy @@ -524,14 +530,14 @@ def test_symmetric_decrypt_local(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) assert decrypt_result.key_id == imported_key.id assert decrypt_result.algorithm == algorithm assert decrypt_result.plaintext == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_wrap_local(self, key_client, is_hsm, **kwargs): @@ -547,7 +553,7 @@ def test_wrap_local(self, key_client, is_hsm, **kwargs): result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -564,7 +570,7 @@ def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key) assert result.key == self.plaintext - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): @@ -589,7 +595,7 @@ def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): result = crypto_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -600,12 +606,12 @@ def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): crypto_client = self.create_crypto_client(key, api_version=key_client.api_version) local_client = CryptographyClient.from_jwk(key.key) for signature_algorithm, hash_function in ( - (SignatureAlgorithm.ps256, hashlib.sha256), - (SignatureAlgorithm.ps384, hashlib.sha384), - (SignatureAlgorithm.ps512, hashlib.sha512), - (SignatureAlgorithm.rs256, hashlib.sha256), - (SignatureAlgorithm.rs384, hashlib.sha384), - (SignatureAlgorithm.rs512, hashlib.sha512), + (SignatureAlgorithm.ps256, hashlib.sha256), + (SignatureAlgorithm.ps384, hashlib.sha384), + (SignatureAlgorithm.ps512, hashlib.sha512), + (SignatureAlgorithm.rs256, hashlib.sha256), + (SignatureAlgorithm.rs384, hashlib.sha384), + (SignatureAlgorithm.rs512, hashlib.sha512), ): digest = hash_function(self.plaintext).digest() @@ -615,7 +621,7 @@ def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): result = 
local_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_verify_local(self, key_client, is_hsm, **kwargs): @@ -640,7 +646,7 @@ def test_ec_verify_local(self, key_client, is_hsm, **kwargs): result = crypto_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -666,11 +672,12 @@ def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): result = local_client.verify(result.algorithm, digest, result.signature) assert result.is_valid - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_local_validity_period_enforcement(self, key_client, is_hsm, **kwargs): """Local crypto operations should respect a key's nbf and exp properties""" + def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_algorithms): crypto_client = self.create_crypto_client(key, api_version=key_client.api_version) for algorithm in encrypt_algorithms: @@ -713,7 +720,7 @@ def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_alg valid_key, (str(the_year_3000), str(the_year_3001)), rsa_encryption_algorithms, rsa_wrap_algorithms ) - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_send_request(self, key_client, is_hsm, **kwargs): @@ -734,7 +741,7 @@ def test_send_request(self, key_client, is_hsm, **kwargs): method="POST", url=f"keys/{key_name}/{imported_key.properties.version}/sign", headers={"Accept": "application/json"}, - json=json + json=json, ) response = crypto_client.send_request(request) response.raise_for_status() @@ -1081,7 +1088,7 @@ def test_rsa_public_key_public_bytes(): public_numbers = public_key.public_numbers() crypto_public_numbers = RSAPublicNumbers(e=public_numbers.e, n=public_numbers.n) crypto_public_bytes = crypto_public_numbers.public_key().public_bytes(Encoding.PEM, PublicFormat.PKCS1) - assert public_bytes == crypto_public_bytes + assert public_bytes == crypto_public_bytes def test_rsa_public_key_private_key_size(): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py index 18bd7007d5e6..ddbb2207e98f 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -70,8 +71,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material."
assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." - + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -80,7 +82,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." async def _import_test_key(self, client, name, hardware_protected=False): def _to_bytes(hex): @@ -133,7 +137,7 @@ async def _import_symmetric_test_key(self, client, name): return key_vault_key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_key_id(self, key_client, is_hsm, **kwargs): @@ -150,7 +154,7 @@ async def test_ec_key_id(self, key_client, is_hsm, **kwargs): await crypto_client.verify(SignatureAlgorithm.es256, hashlib.sha256(self.plaintext).digest(), self.plaintext) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_key_id(self, key_client, is_hsm, **kwargs): @@ -169,7 +173,7 @@ async def test_rsa_key_id(self, key_client, is_hsm, **kwargs): await crypto_client.wrap_key(KeyWrapAlgorithm.rsa_oaep, self.plaintext) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_and_decrypt(self, key_client, is_hsm, **kwargs): @@ -188,7 +192,7 @@ async def test_encrypt_and_decrypt(self, key_client, is_hsm, **kwargs): assert self.plaintext == result.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_sign_and_verify(self, key_client, is_hsm, **kwargs): @@ -211,7 +215,7 @@ async def test_sign_and_verify(self, key_client, is_hsm, **kwargs): assert verified.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_and_unwrap(self, key_client, is_hsm, **kwargs): @@ -231,7 +235,7 @@ async def test_wrap_and_unwrap(self, key_client, is_hsm, **kwargs): assert key_bytes == result.key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def
test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): @@ -257,7 +261,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): result.ciphertext, iv=result.iv, authentication_tag=result.tag, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) else: result = await crypto_client.encrypt( @@ -268,7 +272,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): result.algorithm, result.ciphertext, iv=self.iv, - additional_authenticated_data=None if "CBC" in algorithm else self.aad + additional_authenticated_data=None if "CBC" in algorithm else self.aad, ) assert result.key_id == imported_key.id @@ -279,7 +283,7 @@ async def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs): assert result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): @@ -296,7 +300,7 @@ async def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs): assert result.key == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_local(self, key_client, is_hsm, **kwargs): @@ -333,7 +337,7 @@ async def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_encrypt_local(self, key_client, **kwargs): @@ -363,7 +367,7 @@ async def test_symmetric_encrypt_local(self, key_client, **kwargs): assert decrypt_result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_symmetric_decrypt_local(self, key_client, **kwargs): @@ -387,7 +391,7 @@ async def test_symmetric_decrypt_local(self, key_client, **kwargs): encrypt_result.algorithm, encrypt_result.ciphertext, iv=encrypt_result.iv, - additional_authenticated_data=self.aad + additional_authenticated_data=self.aad, ) assert decrypt_result.key_id == imported_key.id @@ -395,7 +399,7 @@ async def test_symmetric_decrypt_local(self, key_client, **kwargs): assert decrypt_result.plaintext == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_local(self, key_client, is_hsm, **kwargs): @@ -412,7 +416,7 @@ async def test_wrap_local(self, key_client, is_hsm, **kwargs): assert result.key == self.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -430,7 +434,7 @@ async def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.key == self.plaintext @pytest.mark.asyncio -
@pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): @@ -456,7 +460,7 @@ async def test_rsa_verify_local(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -467,12 +471,12 @@ async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): crypto_client = self.create_crypto_client(key, is_async=True, api_version=key_client.api_version) local_client = CryptographyClient.from_jwk(key.key) for signature_algorithm, hash_function in ( - (SignatureAlgorithm.ps256, hashlib.sha256), - (SignatureAlgorithm.ps384, hashlib.sha384), - (SignatureAlgorithm.ps512, hashlib.sha512), - (SignatureAlgorithm.rs256, hashlib.sha256), - (SignatureAlgorithm.rs384, hashlib.sha384), - (SignatureAlgorithm.rs512, hashlib.sha512), + (SignatureAlgorithm.ps256, hashlib.sha256), + (SignatureAlgorithm.ps384, hashlib.sha384), + (SignatureAlgorithm.ps512, hashlib.sha512), + (SignatureAlgorithm.rs256, hashlib.sha256), + (SignatureAlgorithm.rs384, hashlib.sha384), + (SignatureAlgorithm.rs512, hashlib.sha512), ): digest = hash_function(self.plaintext).digest() @@ -483,7 +487,7 @@ async def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_verify_local(self, key_client, is_hsm, **kwargs): @@ -509,7 +513,7 @@ async def test_ec_verify_local(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): @@ -536,11 +540,12 @@ async def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs): assert result.is_valid @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_local_validity_period_enforcement(self, key_client, is_hsm, **kwargs): """Local crypto operations should respect a key's nbf and exp properties""" + async def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_algorithms): crypto_client = self.create_crypto_client(key, is_async=True, api_version=key_client.api_version) crypto_client._keys_get_forbidden = True # Prevent caching key material locally, to force remote ops @@ -587,7 +592,7 @@ async def test_operations(key, expected_error_substrings, encrypt_algorithms, wr ) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_send_request(self, key_client, is_hsm, **kwargs): @@ -608,7 +613,7 @@ async def test_send_request(self, 
key_client, is_hsm, **kwargs): method="POST", url=f"keys/{key_name}/{imported_key.properties.version}/sign", headers={"Accept": "application/json"}, - json=json + json=json, ) response = await crypto_client.send_request(request) response.raise_for_status() @@ -632,7 +637,10 @@ class CustomHookPolicy(SansIOHTTPPolicy): @pytest.mark.asyncio async def test_symmetric_wrap_and_unwrap_local(): key = KeyVaultKey( - key_id="http://localhost/keys/key/version", k=os.urandom(32), kty="oct", key_ops=["unwrapKey", "wrapKey"], + key_id="http://localhost/keys/key/version", + k=os.urandom(32), + kty="oct", + key_ops=["unwrapKey", "wrapKey"], ) crypto_client = CryptographyClient(key, credential=lambda *_: None) @@ -795,7 +803,7 @@ async def test_local_only_mode_no_service_calls(): async def test_local_only_mode_raise(): """A local-only CryptographyClient should raise an exception if an operation can't be performed locally""" - jwk = {"kty":"RSA", "key_ops":["decrypt", "verify", "unwrapKey"], "n":b"10011", "e":b"10001"} + jwk = {"kty": "RSA", "key_ops": ["decrypt", "verify", "unwrapKey"], "n": b"10011", "e": b"10001"} client = CryptographyClient.from_jwk(jwk=jwk) # Algorithm not supported locally @@ -908,7 +916,7 @@ async def test_aes_cbc_iv_validation(): @pytest.mark.asyncio async def test_encrypt_argument_validation(): """The client should raise an error when arguments don't work with the specified algorithm""" - + mock_client = mock.Mock() key = mock.Mock( spec=KeyVaultKey, diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py index a1cdd20eeb4d..82f3aef073b2 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto.py @@ -12,6 +12,7 @@ all_api_versions = get_decorator(only_vault=True) + class TestCryptoExamples(KeyVaultTestCase, KeysTestCase): @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @@ -59,7 +60,9 @@ def test_wrap_unwrap(self, key_client, **kwargs): key = key_client.create_rsa_key(key_name) client = CryptographyClient(key, credential, api_version=key_client.api_version) - key_bytes = b'\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04' + key_bytes = ( + b"\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04" + ) # [START wrap_key] from azure.keyvault.keys.crypto import KeyWrapAlgorithm diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py index 287d673c65db..08320d36b5a4 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_examples_crypto_async.py @@ -15,7 +15,7 @@ class TestCryptoExamples(KeyVaultTestCase): @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_encrypt_decrypt_async(self, key_client, **kwargs): @@ -58,7 +58,7 @@ async def test_encrypt_decrypt_async(self, key_client, **kwargs): # [END decrypt] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def 
test_wrap_unwrap_async(self, key_client, **kwargs): @@ -67,7 +67,9 @@ async def test_wrap_unwrap_async(self, key_client, **kwargs): key = await key_client.create_rsa_key(key_name) client = CryptographyClient(key, credential, api_version=key_client.api_version) - key_bytes = b'\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04' + key_bytes = ( + b"\xc5\xb0\xfc\xf1C\x8a\x88pj\x11\x8d\xe5\x94\xe8\xff\x04\x0eY\xfeu\x8a\xe9<\x06(\xdb\x7f\xa9~\x85\x02\x04" + ) # [START wrap_key] from azure.keyvault.keys.crypto import KeyWrapAlgorithm @@ -86,7 +88,7 @@ async def test_wrap_unwrap_async(self, key_client, **kwargs): # [END unwrap_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_sign_verify_async(self, key_client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py b/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py index 9fe97370442c..18d2a9d8c8f2 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_key_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -23,7 +24,7 @@ KeyRotationLifetimeAction, KeyRotationPolicy, KeyRotationPolicyAction, - KeyType + KeyType, ) from azure.keyvault.keys._generated.models import KeyRotationPolicy as _KeyRotationPolicy from azure.keyvault.keys._shared.client_base import DEFAULT_VERSION @@ -54,6 +55,7 @@ def _assert_rotation_policies_equal(p1, p2): assert p1.updated_on == p2.updated_on assert len(p1.lifetime_actions) == len(p2.lifetime_actions) + def _assert_lifetime_actions_equal(a1, a2): assert a1.action == a2.action assert a1.time_after_create == a2.time_after_create @@ -115,8 +117,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." - + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -126,7 +129,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." 
def _update_key_properties(self, client, key, release_policy=None): expires = date_parse.parse("2050-01-02T08:00:00.000Z") @@ -184,7 +189,7 @@ def _to_bytes(hex): self._validate_rsa_key_bundle(imported_key, client.vault_url, name, key.kty, key.key_ops) return imported_key - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_key_crud_operations(self, client, is_hsm, **kwargs): @@ -210,7 +215,7 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): # create rsa key rsa_key_name = self.get_resource_name("crud-rsa-key") tags = {"purpose": "unit test", "test name ": "CreateRSAKeyTest"} - key_ops = ["encrypt","decrypt","sign","verify","wrapKey","unwrapKey"] + key_ops = ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"] rsa_key = self._create_rsa_key( client, key_name=rsa_key_name, key_operations=key_ops, size=2048, tags=tags, hardware_protected=is_hsm ) @@ -239,8 +244,10 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): # aside from key_ops, the original updated keys should have the same JWKs self._assert_jwks_equal(rsa_key.key, deleted_key.key) assert deleted_key.id == rsa_key.id - assert deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date, "Missing required deleted key attributes." - + assert ( + deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date + ), "Missing required deleted key attributes." + deleted_key_poller.wait() # get the deleted key when soft deleted enabled @@ -248,7 +255,7 @@ def test_key_crud_operations(self, client, is_hsm, **kwargs): assert deleted_key is not None assert rsa_key.id == deleted_key.id - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_rsa_public_exponent(self, client, **kwargs): @@ -260,7 +267,7 @@ def test_rsa_public_exponent(self, client, **kwargs): public_exponent = key.key.e[0] assert public_exponent == 17 - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_backup_restore(self, client, is_hsm, **kwargs): @@ -286,7 +293,7 @@ def test_backup_restore(self, client, is_hsm, **kwargs): restored_key = self._poll_until_no_exception(restore_function, ResourceExistsError) self._assert_key_attributes_equal(created_bundle.properties, restored_key.properties) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_key_list(self, client, is_hsm, **kwargs): @@ -309,7 +316,7 @@ def test_key_list(self, client, is_hsm, **kwargs): del expected[key.name] assert len(expected) == 0 - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_list_versions(self, client, is_hsm, **kwargs): @@ -336,7 +343,7 @@ def test_list_versions(self, client, is_hsm, **kwargs): assert 0 == len(expected) @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_list_deleted_keys(self, 
client, is_hsm, **kwargs): @@ -367,7 +374,7 @@ def test_list_deleted_keys(self, client, is_hsm, **kwargs): del expected[key.name] @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_recover(self, client, is_hsm, **kwargs): @@ -393,7 +400,7 @@ def test_recover(self, client, is_hsm, **kwargs): expected_key = keys[key_name] self._assert_key_attributes_equal(expected_key.properties, recovered_key.properties) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_purge(self, client, is_hsm, **kwargs): @@ -424,8 +431,8 @@ def test_purge(self, client, is_hsm, **kwargs): deleted = [s.name for s in client.list_deleted_keys()] assert not any(s in deleted for s in key_names) - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @KeysClientPreparer(logging_enable = True) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + @KeysClientPreparer(logging_enable=True) @recorded_by_proxy def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -459,8 +466,8 @@ def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler.close() assert False, "Expected request body wasn't logged" - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @KeysClientPreparer(logging_enable = False) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + @KeysClientPreparer(logging_enable=False) @recorded_by_proxy def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -493,7 +500,7 @@ def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler.close() - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_get_random_bytes(self, client, **kwargs): @@ -509,11 +516,11 @@ def test_get_random_bytes(self, client, **kwargs): assert all(random_bytes != rb for rb in generated_random_bytes) generated_random_bytes.append(random_bytes) - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_release(self, client, is_hsm, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if is_hsm and client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -538,7 +545,7 @@ def test_key_release(self, client, is_hsm, **kwargs): if self.is_live and "Target environment attestation statement cannot be verified" in ex.message: pytest.skip("Target environment attestation statement cannot be verified. 
Likely transient failure.") - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_imported_key_release(self, client, **kwargs): @@ -560,11 +567,11 @@ def test_imported_key_release(self, client, **kwargs): release_result = client.release_key(imported_key_name, attestation) assert release_result.value - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_update_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -584,17 +591,9 @@ def test_update_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string) @@ -605,12 +604,12 @@ def test_update_release_policy(self, client, **kwargs): claim_condition = claim_condition if isinstance(claim_condition, bool) else json.loads(claim_condition) assert claim_condition is False - #Immutable policies aren't currently supported on Managed HSM - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + # Immutable policies aren't currently supported on Managed HSM + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_immutable_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") attestation_uri = self._get_attestation_uri() @@ -624,17 +623,9 @@ def test_immutable_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string, immutable=True) @@ -642,11 +633,11 @@ def test_immutable_release_policy(self, client, **kwargs): with pytest.raises(HttpResponseError): self._update_key_properties(client, key, new_release_policy) - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_rotation(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. 
Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -663,11 +654,11 @@ def test_key_rotation(self, client, is_hsm, **kwargs): assert key.properties.version != rotated_key.properties.version assert key.key.n != rotated_key.key.n - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_key_rotation_policy(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -720,7 +711,9 @@ def test_key_rotation_policy(self, client, is_hsm, **kwargs): if not is_hsm: # updating with a round-tripped policy and overriding lifetime_actions newest_actions = [KeyRotationLifetimeAction(KeyRotationPolicyAction.notify, time_before_expiry="P60D")] - newest_policy = client.update_key_rotation_policy(key_name, policy=new_policy, lifetime_actions=newest_actions) + newest_policy = client.update_key_rotation_policy( + key_name, policy=new_policy, lifetime_actions=newest_actions + ) newest_fetched_policy = client.get_key_rotation_policy(key_name) assert newest_policy.expires_in == "P90D" _assert_rotation_policies_equal(newest_policy, newest_fetched_policy) @@ -738,7 +731,7 @@ def test_key_rotation_policy(self, client, is_hsm, **kwargs): newest_fetched_policy_actions = newest_fetched_policy.lifetime_actions[i] _assert_lifetime_actions_equal(newest_policy_actions, newest_fetched_policy_actions) - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_get_cryptography_client(self, client, is_hsm, **kwargs): @@ -774,7 +767,7 @@ def test_get_cryptography_client(self, client, is_hsm, **kwargs): assert "RSA-OAEP" == result.algorithm assert plaintext == result.plaintext - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @KeysClientPreparer() @recorded_by_proxy def test_send_request(self, client, is_hsm, **kwargs): @@ -790,7 +783,7 @@ def test_send_request(self, client, is_hsm, **kwargs): response = client.send_request(request) assert response.json()["key"]["kid"] == key.id - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_default) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_default) @KeysClientPreparer() @recorded_by_proxy def test_get_key_attestation(self, client, **kwargs): @@ -834,6 +827,7 @@ def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`create_key` shouldn't actually trigger this, but for raising behavior) def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): client.create_key("...", "RSA") diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py index f7a8c1fcf8c1..65b553d0b3f4 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
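The hunks below touch test_keys_async.py, the asynchronous mirror of test_key_client.py: every client call is awaited and clients are disposed with close(). A short usage sketch of that surface, with a placeholder vault URL and key name:

# Minimal aio KeyClient usage of the kind the async tests below exercise.
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.keys.aio import KeyClient

async def main():
    credential = DefaultAzureCredential()
    client = KeyClient(vault_url="https://vaultname.vault.azure.net", credential=credential)
    key = await client.create_rsa_key("example-key", size=2048)
    print(key.id)
    # Async clients hold aiohttp sessions and must be closed explicitly.
    await client.close()
    await credential.close()

asyncio.run(main())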
@@ -28,7 +29,13 @@ import pytest from _shared.test_case_async import KeyVaultTestCase -from _async_test_case import get_attestation_token, get_decorator, get_release_policy, is_public_cloud, AsyncKeysClientPreparer +from _async_test_case import ( + get_attestation_token, + get_decorator, + get_release_policy, + is_public_cloud, + AsyncKeysClientPreparer, +) from test_key_client import _assert_lifetime_actions_equal, _assert_rotation_policies_equal from devtools_testutils import set_bodiless_matcher from devtools_testutils.aio import recorded_by_proxy_async @@ -38,12 +45,8 @@ all_api_versions = get_decorator(is_async=True) only_hsm = get_decorator(only_hsm=True, is_async=True) only_hsm_default = get_decorator(only_hsm=True, is_async=True, api_versions=[DEFAULT_VERSION]) -only_hsm_7_4_plus = get_decorator( - only_hsm=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5] -) -only_vault_7_4_plus = get_decorator( - only_vault=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5] -) +only_hsm_7_4_plus = get_decorator(only_hsm=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) +only_vault_7_4_plus = get_decorator(only_vault=True, is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) only_7_4_plus = get_decorator(is_async=True, api_versions=[ApiVersion.V7_4, ApiVersion.V7_5]) logging_enabled = get_decorator(is_async=True, logging_enable=True) logging_disabled = get_decorator(is_async=True, logging_enable=False) @@ -69,15 +72,15 @@ def _assert_jwks_equal(self, jwk1, jwk2): assert getattr(jwk1, field) == getattr(jwk2, field) def _assert_key_attributes_equal(self, k1: KeyProperties, k2: KeyProperties) -> None: - assert k1.name== k2.name - assert k1.vault_url== k2.vault_url - assert k1.enabled== k2.enabled - assert k1.not_before== k2.not_before - assert k1.expires_on== k2.expires_on - assert k1.created_on== k2.created_on - assert k1.updated_on== k2.updated_on - assert k1.tags== k2.tags - assert k1.recovery_level== k2.recovery_level + assert k1.name == k2.name + assert k1.vault_url == k2.vault_url + assert k1.enabled == k2.enabled + assert k1.not_before == k2.not_before + assert k1.expires_on == k2.expires_on + assert k1.created_on == k2.created_on + assert k1.updated_on == k2.updated_on + assert k1.tags == k2.tags + assert k1.recovery_level == k2.recovery_level assert k1.hsm_platform == k2.hsm_platform async def _create_rsa_key(self, client, key_name, **kwargs): @@ -107,7 +110,9 @@ def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kt assert key_curve == key.crv assert kid.index(prefix) == 0, f"Key Id should start with '{prefix}', but value is '{kid}'" assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) @@ -117,7 +122,9 @@ def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops assert key.kty == kty, f"kty should be '{kty}', but is '{key.kty}'" assert key.n and key.e, "Bad RSA public material." 
assert sorted(key_ops) == sorted(key.key_ops), f"keyOps should be '{key_ops}', but is '{key.key_ops}'" - assert key_attributes.properties.created_on and key_attributes.properties.updated_on,"Missing required date attributes." + assert ( + key_attributes.properties.created_on and key_attributes.properties.updated_on + ), "Missing required date attributes." async def _update_key_properties(self, client, key, release_policy=None): expires = date_parse.parse("2050-01-02T08:00:00.000Z") @@ -183,7 +190,7 @@ def _to_bytes(hex): return imported_key @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_crud_operations(self, client, is_hsm, **kwargs): @@ -224,9 +231,7 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): self._assert_key_attributes_equal(rsa_key.properties, key.properties) # get key without version - self._assert_key_attributes_equal( - rsa_key.properties, (await client.get_key(rsa_key.name)).properties - ) + self._assert_key_attributes_equal(rsa_key.properties, (await client.get_key(rsa_key.name)).properties) # update key with version if self.is_live: @@ -242,7 +247,9 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): # aside from key_ops, the original updated keys should have the same JWKs self._assert_jwks_equal(rsa_key.key, deleted_key.key) assert deleted_key.id == rsa_key.id - assert deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date,"Missing required deleted key attributes." + assert ( + deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date + ), "Missing required deleted key attributes." 
# get the deleted key when soft deleted enabled deleted_key = await client.get_deleted_key(rsa_key.name) @@ -250,7 +257,7 @@ async def test_key_crud_operations(self, client, is_hsm, **kwargs): assert rsa_key.id == deleted_key.id @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_rsa_public_exponent(self, client, **kwargs): @@ -263,7 +270,7 @@ async def test_rsa_public_exponent(self, client, **kwargs): assert public_exponent == 17 @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_backup_restore(self, client, is_hsm, **kwargs): @@ -276,7 +283,7 @@ async def test_backup_restore(self, client, is_hsm, **kwargs): # backup key key_backup = await client.backup_key(created_bundle.name) - #self.assertIsNotNone(key_backup, "key_backup") + # self.assertIsNotNone(key_backup, "key_backup") assert key_backup is not None # delete key @@ -291,7 +298,7 @@ async def test_backup_restore(self, client, is_hsm, **kwargs): self._assert_key_attributes_equal(created_bundle.properties, restored_key.properties) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_list(self, client, is_hsm, **kwargs): @@ -315,7 +322,7 @@ async def test_key_list(self, client, is_hsm, **kwargs): assert len(expected) == 0 @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_list_versions(self, client, is_hsm, **kwargs): @@ -343,7 +350,7 @@ async def test_list_versions(self, client, is_hsm, **kwargs): @pytest.mark.asyncio @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_list_deleted_keys(self, client, is_hsm, **kwargs): @@ -376,7 +383,7 @@ async def test_list_deleted_keys(self, client, is_hsm, **kwargs): @pytest.mark.asyncio @pytest.mark.skip("Temporarily disabled due to service issue") - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_recover(self, client, is_hsm, **kwargs): @@ -407,7 +414,7 @@ async def test_recover(self, client, is_hsm, **kwargs): assert len(set(expected.keys()) & set(actual.keys())) == len(expected) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_purge(self, client, is_hsm, **kwargs): @@ -435,8 +442,8 @@ async def test_purge(self, client, is_hsm, **kwargs): assert deleted_key.name not in key_names @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",logging_enabled) - @AsyncKeysClientPreparer(logging_enable = True) + @pytest.mark.parametrize("api_version,is_hsm", logging_enabled) + 
@AsyncKeysClientPreparer(logging_enable=True) @recorded_by_proxy_async async def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -471,8 +478,8 @@ async def test_logging_enabled(self, client, is_hsm, **kwargs): assert False, "Expected request body wasn't logged" @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",logging_disabled) - @AsyncKeysClientPreparer(logging_enable = False) + @pytest.mark.parametrize("api_version,is_hsm", logging_disabled) + @AsyncKeysClientPreparer(logging_enable=False) @recorded_by_proxy_async async def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() @@ -506,7 +513,7 @@ async def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler.close() @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_random_bytes(self, client, **kwargs): @@ -523,11 +530,11 @@ async def test_get_random_bytes(self, client, **kwargs): generated_random_bytes.append(random_bytes) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_release(self, client, is_hsm, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") if is_hsm and client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -553,7 +560,7 @@ async def test_key_release(self, client, is_hsm, **kwargs): pytest.skip("Target environment attestation statement cannot be verified. Likely transient failure.") @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_imported_key_release(self, client, **kwargs): @@ -576,11 +583,11 @@ async def test_imported_key_release(self, client, **kwargs): assert release_result.value @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_update_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. 
Follow up with service team") if client.api_version == ApiVersion.V7_5: pytest.skip("Currently failing on 7.5-preview.1; skipping for now") @@ -600,17 +607,9 @@ async def test_update_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string) @@ -623,11 +622,11 @@ async def test_update_release_policy(self, client, **kwargs): # Immutable policies aren't currently supported on Managed HSM @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_immutable_release_policy(self, client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") attestation_uri = self._get_attestation_uri() @@ -641,17 +640,9 @@ async def test_immutable_release_policy(self, client, **kwargs): new_release_policy_json = { "anyOf": [ - { - "anyOf": [ - { - "claim": "sdk-test", - "equals": False - } - ], - "authority": attestation_uri.rstrip("/") + "/" - } + {"anyOf": [{"claim": "sdk-test", "equals": False}], "authority": attestation_uri.rstrip("/") + "/"} ], - "version": "1.0.0" + "version": "1.0.0", } policy_string = json.dumps(new_release_policy_json).encode() new_release_policy = KeyReleasePolicy(policy_string, immutable=True) @@ -660,11 +651,11 @@ async def test_immutable_release_policy(self, client, **kwargs): await self._update_key_properties(client, key, new_release_policy) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_rotation(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -684,11 +675,11 @@ async def test_key_rotation(self, client, is_hsm, **kwargs): assert key.key.n != rotated_key.key.n @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_key_rotation_policy(self, client, is_hsm, **kwargs): - if (not is_public_cloud() and self.is_live): + if not is_public_cloud() and self.is_live: pytest.skip("This test is not supported in usgov/china region. 
Follow up with service team.") key_name = self.get_resource_name("rotation-key") @@ -763,7 +754,7 @@ async def test_key_rotation_policy(self, client, is_hsm, **kwargs): _assert_lifetime_actions_equal(newest_policy_actions, newest_fetched_policy_actions) @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_cryptography_client(self, client, is_hsm, **kwargs): @@ -800,7 +791,7 @@ async def test_get_cryptography_client(self, client, is_hsm, **kwargs): assert plaintext == result.plaintext @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_vault_7_4_plus) + @pytest.mark.parametrize("api_version,is_hsm", only_vault_7_4_plus) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_send_request(self, client, is_hsm, **kwargs): @@ -817,7 +808,7 @@ async def test_send_request(self, client, is_hsm, **kwargs): assert response.json()["key"]["kid"] == key.id @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm_default) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm_default) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_get_key_attestation(self, client, **kwargs): @@ -864,6 +855,7 @@ async def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`create_key` shouldn't actually trigger this, but for raising behavior) async def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): await client.create_key("...", "RSA") diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py b/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py index 52ec32992543..e9b2867fc08d 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_local_crypto.py @@ -7,10 +7,8 @@ import pytest from azure.keyvault.keys import KeyCurveName, KeyVaultKey -from azure.keyvault.keys.crypto import (EncryptionAlgorithm, KeyWrapAlgorithm, - SignatureAlgorithm) -from azure.keyvault.keys.crypto._providers import \ - get_local_cryptography_provider +from azure.keyvault.keys.crypto import EncryptionAlgorithm, KeyWrapAlgorithm, SignatureAlgorithm +from azure.keyvault.keys.crypto._providers import get_local_cryptography_provider from keys import EC_KEYS, RSA_KEYS @@ -48,14 +46,14 @@ def test_rsa_encrypt_decrypt(key, algorithm): (EncryptionAlgorithm.a256_cbcpad, 32), (EncryptionAlgorithm.a192_cbcpad, 24), (EncryptionAlgorithm.a128_cbcpad, 16), - ) + ), ) def test_symmetric_encrypt_decrypt(algorithm, key_size): jwk = { "k": os.urandom(key_size), - "kid":"http://localhost/keys/key/version", + "kid": "http://localhost/keys/key/version", "kty": "oct-HSM", - "key_ops": ("encrypt", "decrypt") + "key_ops": ("encrypt", "decrypt"), } key = KeyVaultKey(key_id="http://localhost/keys/key/version", jwk=jwk) provider = get_local_cryptography_provider(key.key) @@ -119,9 +117,9 @@ def test_rsa_wrap_unwrap(key, algorithm): def test_symmetric_wrap_unwrap(algorithm): jwk = { "k": os.urandom(32), - "kid":"http://localhost/keys/key/version", + "kid": "http://localhost/keys/key/version", "kty": "oct", - "key_ops": ("unwrapKey", "wrapKey") + "key_ops": ("unwrapKey", "wrapKey"), } key = KeyVaultKey(key_id="http://localhost/keys/key/version", jwk=jwk) provider = 
get_local_cryptography_provider(key.key) diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py b/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py index 3f72b8cb9556..eb33db5f2860 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_parse_id.py @@ -15,7 +15,7 @@ class TestParseId(KeyVaultTestCase, KeysTestCase): - @pytest.mark.parametrize("api_version,is_hsm",only_vault) + @pytest.mark.parametrize("api_version,is_hsm", only_vault) @KeysClientPreparer() @recorded_by_proxy def test_parse_key_id_with_version(self, client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py index 68e2b6496d64..be044fe64650 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys.py @@ -36,11 +36,11 @@ def test_create_key_client(): class TestExamplesKeyVault(KeyVaultTestCase, KeysTestCase): - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_key_crud_operations(self, key_client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") key_name = self.get_resource_name("key-name") @@ -131,7 +131,7 @@ def test_example_key_crud_operations(self, key_client, **kwargs): deleted_key_poller.wait() # [END delete_key] - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @KeysClientPreparer() @recorded_by_proxy def test_example_create_oct_key(self, key_client, **kwargs): @@ -145,7 +145,7 @@ def test_example_create_oct_key(self, key_client, **kwargs): print(key.key_type) # [END create_oct_key] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_key_list_operations(self, key_client, **kwargs): @@ -186,7 +186,7 @@ def test_example_key_list_operations(self, key_client, **kwargs): print(key.deleted_date) # [END list_deleted_keys] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_keys_backup_restore(self, key_client, **kwargs): @@ -219,7 +219,7 @@ def test_example_keys_backup_restore(self, key_client, **kwargs): print(restored_key.properties.version) # [END restore_key_backup] - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @KeysClientPreparer() @recorded_by_proxy def test_example_keys_recover(self, key_client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py index 0357017a4b44..e8e127c82d9e 100644 --- a/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py +++ b/sdk/keyvault/azure-keyvault-keys/tests/test_samples_keys_async.py @@ -42,11 +42,11 @@ async def test_create_key_client(): class TestExamplesKeyVault(KeyVaultTestCase): @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + 
@pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_key_crud_operations(self, key_client, **kwargs): - if (self.is_live and os.environ["KEYVAULT_SKU"] != "premium"): + if self.is_live and os.environ["KEYVAULT_SKU"] != "premium": pytest.skip("This test is not supported on standard SKU vaults. Follow up with service team") key_name = self.get_resource_name("key-name") @@ -132,7 +132,7 @@ async def test_example_key_crud_operations(self, key_client, **kwargs): # [END delete_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",only_hsm) + @pytest.mark.parametrize("api_version,is_hsm", only_hsm) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_create_oct_key(self, key_client, **kwargs): @@ -147,7 +147,7 @@ async def test_example_create_oct_key(self, key_client, **kwargs): # [END create_oct_key] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_key_list_operations(self, key_client, **kwargs): @@ -194,7 +194,7 @@ async def test_example_key_list_operations(self, key_client, **kwargs): # [END list_deleted_keys] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_keys_backup_restore(self, key_client, **kwargs): @@ -229,7 +229,7 @@ async def test_example_keys_backup_restore(self, key_client, **kwargs): # [END restore_key_backup] @pytest.mark.asyncio - @pytest.mark.parametrize("api_version,is_hsm",all_api_versions) + @pytest.mark.parametrize("api_version,is_hsm", all_api_versions) @AsyncKeysClientPreparer() @recorded_by_proxy_async async def test_example_keys_recover(self, key_client, **kwargs): diff --git a/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in b/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in index 91c95d391763..ff7a325b7774 100644 --- a/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-secrets/MANIFEST.in @@ -1,7 +1,8 @@ include *.md include LICENSE -include azure/keyvault/secrets/py.typed +include azure/keyvault/secrets/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py include azure/keyvault/__init__.py +include azure/keyvault/secrets/__init__.py diff --git a/sdk/keyvault/azure-keyvault-secrets/_metadata.json b/sdk/keyvault/azure-keyvault-secrets/_metadata.json new file mode 100644 index 000000000000..06284fddac1b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-secrets/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "7.6" +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json b/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json new file mode 100644 index 000000000000..e4da985ff219 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-secrets/apiview-properties.json @@ -0,0 +1,41 @@ +{ + "CrossLanguagePackageId": "KeyVault", + "CrossLanguageDefinitionId": { + "azure.keyvault.secrets._generated.models.BackupSecretResult": "KeyVault.BackupSecretResult", + "azure.keyvault.secrets._generated.models.DeletedSecretBundle": "KeyVault.DeletedSecretBundle", + "azure.keyvault.secrets._generated.models.DeletedSecretItem": "KeyVault.DeletedSecretItem", + 
"azure.keyvault.secrets._generated.models.KeyVaultError": "KeyVaultError", + "azure.keyvault.secrets._generated.models.KeyVaultErrorError": "KeyVaultError.error.anonymous", + "azure.keyvault.secrets._generated.models.SecretAttributes": "KeyVault.SecretAttributes", + "azure.keyvault.secrets._generated.models.SecretBundle": "KeyVault.SecretBundle", + "azure.keyvault.secrets._generated.models.SecretItem": "KeyVault.SecretItem", + "azure.keyvault.secrets._generated.models.SecretRestoreParameters": "KeyVault.SecretRestoreParameters", + "azure.keyvault.secrets._generated.models.SecretSetParameters": "KeyVault.SecretSetParameters", + "azure.keyvault.secrets._generated.models.SecretUpdateParameters": "KeyVault.SecretUpdateParameters", + "azure.keyvault.secrets._generated.models.DeletionRecoveryLevel": "KeyVault.DeletionRecoveryLevel", + "azure.keyvault.secrets._generated.KeyVaultClient.set_secret": "KeyVault.setSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.set_secret": "KeyVault.setSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.delete_secret": "KeyVault.deleteSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.delete_secret": "KeyVault.deleteSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.update_secret": "KeyVault.updateSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.update_secret": "KeyVault.updateSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.get_secret": "KeyVault.getSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secret": "KeyVault.getSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.get_secrets": "KeyVault.getSecrets", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secrets": "KeyVault.getSecrets", + "azure.keyvault.secrets._generated.KeyVaultClient.get_secret_versions": "KeyVault.getSecretVersions", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_secret_versions": "KeyVault.getSecretVersions", + "azure.keyvault.secrets._generated.KeyVaultClient.get_deleted_secrets": "KeyVault.getDeletedSecrets", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_deleted_secrets": "KeyVault.getDeletedSecrets", + "azure.keyvault.secrets._generated.KeyVaultClient.get_deleted_secret": "KeyVault.getDeletedSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.get_deleted_secret": "KeyVault.getDeletedSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.purge_deleted_secret": "KeyVault.purgeDeletedSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.purge_deleted_secret": "KeyVault.purgeDeletedSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.recover_deleted_secret": "KeyVault.recoverDeletedSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.recover_deleted_secret": "KeyVault.recoverDeletedSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.backup_secret": "KeyVault.backupSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.backup_secret": "KeyVault.backupSecret", + "azure.keyvault.secrets._generated.KeyVaultClient.restore_secret": "KeyVault.restoreSecret", + "azure.keyvault.secrets._generated.aio.KeyVaultClient.restore_secret": "KeyVault.restoreSecret" + } +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py index 125860bac907..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/__init__.py @@ -1,6 +1 @@ -# 
------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -# pylint:disable=missing-docstring __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py index 125860bac907..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/__init__.py @@ -1,6 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -# pylint:disable=missing-docstring __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py index ec1b5aaa0651..d55ccad1f573 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/__init__.py @@ -1,19 +1 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -from ._models import DeletedSecret, KeyVaultSecret, KeyVaultSecretIdentifier, SecretProperties -from ._shared.client_base import ApiVersion -from ._client import SecretClient - -__all__ = [ - "ApiVersion", - "SecretClient", - "KeyVaultSecret", - "KeyVaultSecretIdentifier", - "SecretProperties", - "DeletedSecret" -] - -from ._version import VERSION -__version__ = VERSION +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py index 7a64848801dd..e0eaca55c576 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_client.py @@ -62,11 +62,7 @@ def get_secret(self, name: str, version: Optional[str] = None, **kwargs: Any) -> :caption: Get a secret :dedent: 8 """ - bundle = self._client.get_secret( - secret_name=name, - secret_version=version or "", - **kwargs - ) + bundle = self._client.get_secret(secret_name=name, secret_version=version or "", **kwargs) return KeyVaultSecret._from_secret_bundle(bundle) @distributed_trace @@ -111,24 +107,15 @@ def set_secret( """ if enabled is not None or not_before is not None or expires_on is not None: - attributes = self._models.SecretAttributes( - enabled=enabled, not_before=not_before, expires=expires_on - ) + attributes = self._models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires_on) else: attributes = None parameters = self._models.SecretSetParameters( - value=value, - tags=tags, - content_type=content_type, - secret_attributes=attributes + value=value, tags=tags, content_type=content_type, secret_attributes=attributes ) - bundle = self._client.set_secret( - secret_name=name, - parameters=parameters, - **kwargs - ) + bundle = self._client.set_secret(secret_name=name, parameters=parameters, **kwargs) return KeyVaultSecret._from_secret_bundle(bundle) @distributed_trace @@ -175,9 +162,7 @@ def update_secret_properties( """ if enabled is not None or not_before is not None or expires_on is not 
None: - attributes = self._models.SecretAttributes( - enabled=enabled, not_before=not_before, expires=expires_on - ) + attributes = self._models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires_on) else: attributes = None @@ -187,12 +172,7 @@ def update_secret_properties( tags=tags, ) - bundle = self._client.update_secret( - name, - secret_version=version or "", - parameters=parameters, - **kwargs - ) + bundle = self._client.update_secret(name, secret_version=version or "", parameters=parameters, **kwargs) return SecretProperties._from_secret_bundle(bundle) # pylint: disable=protected-access @distributed_trace @@ -216,7 +196,7 @@ def list_properties_of_secrets(self, **kwargs: Any) -> ItemPaged[SecretPropertie return self._client.get_secrets( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -243,7 +223,7 @@ def list_properties_of_secret_versions(self, name: str, **kwargs: Any) -> ItemPa name, maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -292,13 +272,14 @@ def restore_secret_backup(self, backup: bytes, **kwargs: Any) -> SecretPropertie """ bundle = self._client.restore_secret( - parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup), - **kwargs + parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup), **kwargs ) return SecretProperties._from_secret_bundle(bundle) @distributed_trace - def begin_delete_secret(self, name: str, **kwargs: Any) -> LROPoller[DeletedSecret]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type + def begin_delete_secret( + self, name: str, **kwargs: Any + ) -> LROPoller[DeletedSecret]: # pylint:disable=bad-option-value,delete-operation-wrong-return-type """Delete all versions of a secret. Requires secrets/delete permission. When this method returns Key Vault has begun deleting the secret. Deletion may take several seconds in a vault @@ -392,7 +373,7 @@ def list_deleted_secrets(self, **kwargs: Any) -> ItemPaged[DeletedSecret]: return self._client.get_deleted_secrets( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [DeletedSecret._from_deleted_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py index affcf5d228d3..39918fe3e74a 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_client.py @@ -15,14 +15,14 @@ from azure.core.rest import HttpRequest, HttpResponse from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations._operations import _KeyVaultClientOperationsMixin from ._utils.serialization import Deserializer, Serializer if TYPE_CHECKING: from azure.core.credentials import TokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. 
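(The two hunks above make the generated operations mixin private: `KeyVaultClientOperationsMixin` becomes `_KeyVaultClientOperationsMixin`, imported directly from `._operations._operations`, so it drops out of the package's public `__all__` while `KeyVaultClient` still inherits its methods. A minimal sketch of that layout, with illustrative names rather than the generated module's real signatures:

    # _operations.py: the mixin stays private to the package.
    class _OperationsMixin:
        def get_secret(self, secret_name: str, secret_version: str) -> dict:
            # The generated code builds and sends an HTTP request here;
            # this stand-in just returns a canned payload.
            return {"id": f"https://vault/secrets/{secret_name}/{secret_version}"}

    # _client.py: only the client is re-exported.
    class KeyVaultClient(_OperationsMixin):
        """Public client; the mixin never appears on the public API surface."""

    __all__ = ["KeyVaultClient"]  # the mixin is deliberately omitted

The `__init__.py` hunks that follow complete the same move by emptying the mixin out of the package's exported names.)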
diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py index d514f5e4b5be..933fcd7d1b55 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py index c83ef9a31524..1412fe4b0e98 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload import urllib.parse from azure.core import PipelineClient @@ -343,7 +343,7 @@ def build_key_vault_restore_secret_request(**kwargs: Any) -> HttpRequest: return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], KeyVaultClientConfiguration] ): @@ -826,7 +826,7 @@ def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) -> _m return deserialized # type: ignore @distributed_trace - def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> Iterable["_models.SecretItem"]: + def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> ItemPaged["_models.SecretItem"]: """List secrets in a specified key vault. The Get Secrets operation is applicable to the entire vault. However, only the base secret @@ -919,7 +919,7 @@ def get_next(next_link=None): @distributed_trace def get_secret_versions( self, secret_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.SecretItem"]: + ) -> ItemPaged["_models.SecretItem"]: """List all versions of the specified secret. The full secret identifier and attributes are provided in the response. No values are returned @@ -1014,7 +1014,7 @@ def get_next(next_link=None): @distributed_trace def get_deleted_secrets( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> Iterable["_models.DeletedSecretItem"]: + ) -> ItemPaged["_models.DeletedSecretItem"]: """Lists deleted secrets for the specified vault. 
The Get Deleted Secrets operation returns the secrets that have been deleted for a vault diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py index 3f8e48a8e50c..f6cbad08a480 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_client.py @@ -16,13 +16,13 @@ from .._utils.serialization import Deserializer, Serializer from ._configuration import KeyVaultClientConfiguration -from ._operations import KeyVaultClientOperationsMixin +from ._operations._operations import _KeyVaultClientOperationsMixin if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential -class KeyVaultClient(KeyVaultClientOperationsMixin): +class KeyVaultClient(_KeyVaultClientOperationsMixin): """The key vault client performs cryptographic key operations and vault operations against the Key Vault service. diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py index d514f5e4b5be..933fcd7d1b55 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/__init__.py @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import KeyVaultClientOperationsMixin # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "KeyVaultClientOperationsMixin", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py index 4e90fe51dc50..0c70e64ce84f 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/aio/_operations/_operations.py @@ -9,7 +9,7 @@ from collections.abc import MutableMapping from io import IOBase import json -from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload import urllib.parse from azure.core import AsyncPipelineClient @@ -54,7 +54,7 @@ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class KeyVaultClientOperationsMixin( +class _KeyVaultClientOperationsMixin( ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], KeyVaultClientConfiguration] ): @@ -537,7 +537,7 @@ async def get_secret(self, secret_name: str, secret_version: str, **kwargs: Any) return deserialized # type: ignore @distributed_trace - def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncIterable["_models.SecretItem"]: + def get_secrets(self, *, maxresults: Optional[int] = None, **kwargs: Any) -> AsyncItemPaged["_models.SecretItem"]: """List secrets in a specified key vault. 
The Get Secrets operation is applicable to the entire vault. However, only the base secret @@ -631,7 +631,7 @@ async def get_next(next_link=None): @distributed_trace def get_secret_versions( self, secret_name: str, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.SecretItem"]: + ) -> AsyncItemPaged["_models.SecretItem"]: """List all versions of the specified secret. The full secret identifier and attributes are provided in the response. No values are returned @@ -727,7 +727,7 @@ async def get_next(next_link=None): @distributed_trace def get_deleted_secrets( self, *, maxresults: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.DeletedSecretItem"]: + ) -> AsyncItemPaged["_models.DeletedSecretItem"]: """Lists deleted secrets for the specified vault. The Get Deleted Secrets operation returns the secrets that have been deleted for a vault diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py index 0f84607e3ccd..3e3ac1855178 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/async_challenge_auth_policy.py @@ -66,7 +66,6 @@ async def await_result(func: Callable[P, Union[T, Awaitable[T]]], *args: P.args, return result - class AsyncChallengeAuthPolicy(AsyncBearerTokenCredentialPolicy): """Policy for handling HTTP authentication challenges. @@ -83,9 +82,7 @@ def __init__(self, credential: AsyncTokenProvider, *scopes: str, **kwargs: Any) self._verify_challenge_resource = kwargs.pop("verify_challenge_resource", True) self._request_copy: Optional[HttpRequest] = None - async def send( - self, request: PipelineRequest[HttpRequest] - ) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: + async def send(self, request: PipelineRequest[HttpRequest]) -> PipelineResponse[HttpRequest, AsyncHttpResponse]: """Authorize request with a bearer token and send it to the next policy. 
We implement this method to account for the valid scenario where a Key Vault authentication challenge is @@ -156,7 +153,6 @@ async def handle_challenge_flow( await await_result(self.on_response, request, response) return response - async def on_request(self, request: PipelineRequest) -> None: _enforce_tls(request) challenge = ChallengeCache.get_challenge_for_url(request.http_request.url) @@ -227,9 +223,7 @@ async def on_challenge(self, request: PipelineRequest, response: PipelineRespons if challenge.tenant_id and challenge.tenant_id.lower().endswith("adfs"): await self.authorize_request(request, scope, claims=challenge.claims) else: - await self.authorize_request( - request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id - ) + await self.authorize_request(request, scope, claims=challenge.claims, tenant_id=challenge.tenant_id) return True diff --git a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py index 20904cf11646..d61c0d545c1e 100644 --- a/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py +++ b/sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/aio/_client.py @@ -111,17 +111,10 @@ async def set_secret( attributes = None parameters = self._models.SecretSetParameters( - value=value, - tags=tags, - content_type=content_type, - secret_attributes=attributes + value=value, tags=tags, content_type=content_type, secret_attributes=attributes ) - bundle = await self._client.set_secret( - name, - parameters=parameters, - **kwargs - ) + bundle = await self._client.set_secret(name, parameters=parameters, **kwargs) return KeyVaultSecret._from_secret_bundle(bundle) @distributed_trace_async @@ -177,12 +170,7 @@ async def update_secret_properties( tags=tags, ) - bundle = await self._client.update_secret( - name, - secret_version=version or "", - parameters=parameters, - **kwargs - ) + bundle = await self._client.update_secret(name, secret_version=version or "", parameters=parameters, **kwargs) return SecretProperties._from_secret_bundle(bundle) # pylint: disable=protected-access @distributed_trace @@ -205,7 +193,7 @@ def list_properties_of_secrets(self, **kwargs: Any) -> AsyncItemPaged[SecretProp return self._client.get_secrets( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace @@ -231,7 +219,7 @@ def list_properties_of_secret_versions(self, name: str, **kwargs: Any) -> AsyncI name, maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace_async @@ -278,8 +266,7 @@ async def restore_secret_backup(self, backup: bytes, **kwargs: Any) -> SecretPro :dedent: 8 """ bundle = await self._client.restore_secret( - parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup), - **kwargs + parameters=self._models.SecretRestoreParameters(secret_bundle_backup=backup), **kwargs ) return SecretProperties._from_secret_bundle(bundle) @@ -371,7 +358,7 @@ def list_deleted_secrets(self, **kwargs: Any) -> AsyncItemPaged[DeletedSecret]: return self._client.get_deleted_secrets( maxresults=kwargs.pop("max_page_size", None), cls=lambda objs: [DeletedSecret._from_deleted_secret_item(x) for x in objs], - **kwargs + **kwargs, ) @distributed_trace_async @@ -441,7 +428,7 @@ async def recover_deleted_secret(self, name: str, **kwargs: Any) -> SecretProper 
command=command, final_resource=recovered_secret, finished=False, - interval=polling_interval + interval=polling_interval, ) await polling_method.run() diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py index 6cf17e27fb66..e03c2f3cace0 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py index 3412407cee50..61ecc9baf868 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/backup_restore_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -8,6 +9,7 @@ from azure.keyvault.secrets.aio import SecretClient from azure.identity.aio import DefaultAzureCredential + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) @@ -74,4 +76,4 @@ async def run_sample(): if __name__ == "__main__": - asyncio.run(run_sample()) \ No newline at end of file + asyncio.run(run_sample()) diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py index f9c4bc8f96fe..3a7748229984 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py index ff1e1de91d97..b42e93ce1234 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/hello_world_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -9,6 +10,7 @@ from azure.keyvault.secrets.aio import SecretClient from azure.identity.aio import DefaultAzureCredential + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py index bf31a8a86fc5..207d938c0720 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. 
# Licensed under the MIT License. @@ -58,17 +59,13 @@ for secret in secrets: assert secret.name retrieved_secret = client.get_secret(secret.name) - print( - f"Secret with name '{retrieved_secret.name}' and value {retrieved_secret.name} was found." - ) + print(f"Secret with name '{retrieved_secret.name}' and value {retrieved_secret.name} was found.") # The bank account password got updated, so you want to update the secret in Key Vault to ensure it reflects the # new password. Calling set_secret on an existing secret creates a new version of the secret in the Key Vault # with the new value. updated_secret = client.set_secret(bank_secret.name, "newSecretValue") -print( - f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'" -) +print(f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'") # You need to check all the different values your bank account password secret had previously. Lets print all # the versions of this secret. @@ -89,6 +86,4 @@ print("\n.. List deleted secrets from the Key Vault") deleted_secrets = client.list_deleted_secrets() for deleted_secret in deleted_secrets: - print( - f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'" - ) + print(f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'") diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py index 7a591519b0a0..6c02546e7e58 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/list_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -8,6 +9,7 @@ from azure.keyvault.secrets.aio import SecretClient from azure.identity.aio import DefaultAzureCredential + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) @@ -64,9 +66,7 @@ async def run_sample(): # new password. Calling set_secret on an existing secret creates a new version of the secret in the Key Vault # with the new value. updated_secret = await client.set_secret(bank_secret.name, "newSecretValue") - print( - f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'" - ) + print(f"Secret with name '{updated_secret.name}' was updated with new value '{updated_secret.value}'") # You need to check all the different values your bank account password secret had previously. Lets print all # the versions of this secret. @@ -84,9 +84,7 @@ async def run_sample(): print("\n.. 
List deleted secrets from the Key Vault") deleted_secrets = client.list_deleted_secrets() async for deleted_secret in deleted_secrets: - print( - f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'" - ) + print(f"Secret with name '{deleted_secret.name}' has recovery id '{deleted_secret.recovery_id}'") print("\nrun_sample done") await credential.close() diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py index c22aac38188e..13d810d778bb 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py index b69b0bcfb4ef..60e690ec9939 100644 --- a/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py +++ b/sdk/keyvault/azure-keyvault-secrets/samples/recover_purge_operations_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -8,6 +9,7 @@ from azure.keyvault.secrets.aio import SecretClient from azure.identity.aio import DefaultAzureCredential + # ---------------------------------------------------------------------------------------------------------- # Prerequisites: # 1. An Azure Key Vault (https://learn.microsoft.com/azure/key-vault/quick-create-cli) diff --git a/sdk/keyvault/azure-keyvault-secrets/setup.py b/sdk/keyvault/azure-keyvault-secrets/setup.py index 62f8b4627517..cfdd4e4bd9ff 100644 --- a/sdk/keyvault/azure-keyvault-secrets/setup.py +++ b/sdk/keyvault/azure-keyvault-secrets/setup.py @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-keyvault-secrets" PACKAGE_PPRINT_NAME = "Key Vault Secrets" +PACKAGE_NAMESPACE = "azure.keyvault.secrets._generated" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -29,7 +30,6 @@ setup( name=PACKAGE_NAME, version=version, - include_package_data=True, description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", @@ -39,7 +39,7 @@ url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", keywords="azure, azure sdk", classifiers=[ - "Development Status :: 5 - Production/Stable", + "Development Status :: ", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", @@ -47,22 +47,21 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", ], zip_safe=False, packages=find_packages( exclude=[ - "samples", "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.keyvault", ] ), + include_package_data=True, + package_data={ + 
"azure.keyvault.secrets._generated": ["py.typed"], + }, install_requires=[ "isodate>=0.6.1", - "azure-core>=1.31.0", + "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py b/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py index 96ff292c3f2c..e630f5c4ab81 100644 --- a/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py +++ b/sdk/keyvault/azure-keyvault-secrets/tests/conftest.py @@ -31,7 +31,7 @@ test_proxy, add_oauth_response_sanitizer, add_general_regex_sanitizer, - remove_batch_sanitizers + remove_batch_sanitizers, ) diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py index fded987db56c..22189629419d 100644 --- a/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py +++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_polling_method.py @@ -74,7 +74,9 @@ def command(): _command.operation_complete = True resource = object() - polling_method = DeleteRecoverPollingMethod(mock_pipeline_response, command, final_resource=resource, finished=False) + polling_method = DeleteRecoverPollingMethod( + mock_pipeline_response, command, final_resource=resource, finished=False + ) assert not polling_method.finished() with mock.patch(SLEEP) as sleep: @@ -102,7 +104,9 @@ def test_final_resource(): assert final_resource is resource command = mock.Mock() - polling_method = DeleteRecoverPollingMethod(mock_pipeline_response, command, final_resource=resource, finished=False) + polling_method = DeleteRecoverPollingMethod( + mock_pipeline_response, command, final_resource=resource, finished=False + ) assert polling_method.resource() is resource polling_method.run() diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py index 435d6c6d04b1..8408e6e35890 100644 --- a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py +++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_async.py @@ -390,6 +390,7 @@ async def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`set_secret` shouldn't actually trigger this, but for raising behavior) async def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): await client.set_secret("...", "...") diff --git a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py index ed09c2759c48..d92537cc0f19 100644 --- a/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py +++ b/sdk/keyvault/azure-keyvault-secrets/tests/test_secrets_client.py @@ -379,6 +379,7 @@ def test_40x_handling(self, client, **kwargs): # Test that 409 is raised correctly (`set_secret` shouldn't actually trigger this, but for raising behavior) def run(*_, **__): return Mock(http_response=Mock(status_code=409)) + with patch.object(client._client._client._pipeline, "run", run): with pytest.raises(ResourceExistsError): client.set_secret("...", "...") diff --git a/sdk/keyvault/azure-keyvault-securitydomain/MANIFEST.in b/sdk/keyvault/azure-keyvault-securitydomain/MANIFEST.in index 10eedf4ae5b1..2284957104d3 100644 --- a/sdk/keyvault/azure-keyvault-securitydomain/MANIFEST.in +++ b/sdk/keyvault/azure-keyvault-securitydomain/MANIFEST.in @@ -1,7 +1,5 @@ include *.md include LICENSE -include 
azure/keyvault/securitydomain/py.typed +include clientcustomizations/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/__init__.py -include azure/keyvault/__init__.py diff --git a/sdk/keyvault/azure-keyvault-securitydomain/_metadata.json b/sdk/keyvault/azure-keyvault-securitydomain/_metadata.json new file mode 100644 index 000000000000..a01010ad9627 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "7.5" +} \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-securitydomain/apiview-properties.json b/sdk/keyvault/azure-keyvault-securitydomain/apiview-properties.json index 77d0d994a2ac..70ab175eb4d5 100644 --- a/sdk/keyvault/azure-keyvault-securitydomain/apiview-properties.json +++ b/sdk/keyvault/azure-keyvault-securitydomain/apiview-properties.json @@ -1,19 +1,19 @@ { "CrossLanguagePackageId": "KeyVault", "CrossLanguageDefinitionId": { - "azure.keyvault.securitydomain.models.CertificateInfo": "KeyVault.CertificateInfoObject", - "azure.keyvault.securitydomain.models.Error": "Error", - "azure.keyvault.securitydomain.models.KeyVaultError": "KeyVaultError", - "azure.keyvault.securitydomain.models.SecurityDomain": "KeyVault.SecurityDomainObject", - "azure.keyvault.securitydomain.models.SecurityDomainJsonWebKey": "KeyVault.SecurityDomainJsonWebKey", - "azure.keyvault.securitydomain.models.SecurityDomainOperationStatus": "KeyVault.SecurityDomainOperationStatus", - "azure.keyvault.securitydomain.models.TransferKey": "KeyVault.TransferKey", - "azure.keyvault.securitydomain.models.OperationStatus": "KeyVault.OperationStatus", - "azure.keyvault.securitydomain.SecurityDomainClient.get_download_status": "ClientCustomizations.SecurityDomainClient.getDownloadStatus", - "azure.keyvault.securitydomain.aio.SecurityDomainClient.get_download_status": "ClientCustomizations.SecurityDomainClient.getDownloadStatus", - "azure.keyvault.securitydomain.SecurityDomainClient.get_upload_status": "ClientCustomizations.SecurityDomainClient.getUploadStatus", - "azure.keyvault.securitydomain.aio.SecurityDomainClient.get_upload_status": "ClientCustomizations.SecurityDomainClient.getUploadStatus", - "azure.keyvault.securitydomain.SecurityDomainClient.get_transfer_key": "ClientCustomizations.SecurityDomainClient.getTransferKey", - "azure.keyvault.securitydomain.aio.SecurityDomainClient.get_transfer_key": "ClientCustomizations.SecurityDomainClient.getTransferKey" + "keyvault.models.CertificateInfo": "KeyVault.CertificateInfoObject", + "clientcustomizations.models.KeyVaultError": "KeyVaultError", + "clientcustomizations.models.KeyVaultErrorError": "KeyVaultError.error.anonymous", + "keyvault.models.SecurityDomain": "KeyVault.SecurityDomainObject", + "keyvault.models.SecurityDomainJsonWebKey": "KeyVault.SecurityDomainJsonWebKey", + "keyvault.models.SecurityDomainOperationStatus": "KeyVault.SecurityDomainOperationStatus", + "keyvault.models.TransferKey": "KeyVault.TransferKey", + "clientcustomizations.models.OperationStatus": "KeyVault.OperationStatus", + "clientcustomizations.SecurityDomainClient.get_download_status": "ClientCustomizations.SecurityDomainClient.getDownloadStatus", + "clientcustomizations.aio.SecurityDomainClient.get_download_status": "ClientCustomizations.SecurityDomainClient.getDownloadStatus", + "clientcustomizations.SecurityDomainClient.get_upload_status": "ClientCustomizations.SecurityDomainClient.getUploadStatus", + "clientcustomizations.aio.SecurityDomainClient.get_upload_status": 
"ClientCustomizations.SecurityDomainClient.getUploadStatus", + "clientcustomizations.SecurityDomainClient.get_transfer_key": "ClientCustomizations.SecurityDomainClient.getTransferKey", + "clientcustomizations.aio.SecurityDomainClient.get_transfer_key": "ClientCustomizations.SecurityDomainClient.getTransferKey" } } \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/async_polling.py b/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/async_polling.py index 65d80af3bdaa..fedc2a351f49 100644 --- a/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/async_polling.py +++ b/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/async_polling.py @@ -159,5 +159,6 @@ def resource(self) -> None: """ return None + class AsyncSecurityDomainUploadNoPolling(AsyncSecurityDomainUploadPollingMethod, AsyncNoPollingMixin): pass diff --git a/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/polling.py b/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/polling.py index ecbfc1775ed3..b06a686a0534 100644 --- a/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/polling.py +++ b/sdk/keyvault/azure-keyvault-securitydomain/azure/keyvault/securitydomain/_internal/polling.py @@ -199,5 +199,6 @@ def resource(self) -> None: """ return None + class SecurityDomainUploadNoPolling(SecurityDomainUploadPollingMethod, NoPollingMixin): pass diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/__init__.py new file mode 100644 index 000000000000..5cb269a9b5a9 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import SecurityDomainClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "SecurityDomainClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_client.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_client.py new file mode 100644 index 000000000000..f43506098cba --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_client.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import SecurityDomainClientConfiguration +from ._operations._operations import _SecurityDomainClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class SecurityDomainClient(_SecurityDomainClientOperationsMixin): + """SecurityDomainClient. + + :param vault_base_url: Required. + :type vault_base_url: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is "7.5". Note + that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, vault_base_url: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{vaultBaseUrl}" + self._config = SecurityDomainClientConfiguration(vault_base_url=vault_base_url, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_configuration.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_configuration.py new file mode 100644 index 000000000000..1a036a706db4 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_configuration.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class SecurityDomainClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for SecurityDomainClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param vault_base_url: Required. + :type vault_base_url: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is "7.5". Note + that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, vault_base_url: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "7.5") + + if vault_base_url is None: + raise ValueError("Parameter 'vault_base_url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.vault_base_url = vault_base_url + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://vault.azure.net/.default"]) + kwargs.setdefault("sdk_moniker", "keyvault-securitydomain/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_operations.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_operations.py new file mode 100644 index 000000000000..5993207cfd92 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_operations.py @@ -0,0 +1,634 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from collections.abc import MutableMapping
+from io import IOBase
+import json
+from typing import Any, Callable, Dict, IO, Iterator, Optional, TypeVar, Union, cast, overload
+
+from azure.core import PipelineClient
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    StreamClosedError,
+    StreamConsumedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from ...keyvault import models as _keyvault_models3
+from .._configuration import SecurityDomainClientConfiguration
+from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
+from .._utils.serialization import Serializer
+from .._utils.utils import ClientMixinABC
+
+JSON = MutableMapping[str, Any]
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_security_domain_get_download_status_request(**kwargs: Any) -> HttpRequest:  # pylint: disable=name-too-long
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.5"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/securitydomain/download/pending"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_security_domain_download_request(**kwargs: Any) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.5"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/securitydomain/download"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    if content_type is not None:
+        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_security_domain_get_upload_status_request(**kwargs: Any) -> HttpRequest:  # pylint: disable=name-too-long
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params =
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.5")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/securitydomain/upload/pending" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_security_domain_upload_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.5")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/securitydomain/upload" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_security_domain_get_transfer_key_request(**kwargs: Any) -> HttpRequest: # pylint: disable=name-too-long + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "7.5")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/securitydomain/upload" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class _SecurityDomainClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], SecurityDomainClientConfiguration] +): + + @distributed_trace + def get_download_status(self, **kwargs: Any) -> _keyvault_models3.SecurityDomainOperationStatus: + """Retrieves the Security Domain download operation status. + + :return: SecurityDomainOperationStatus. 
The SecurityDomainOperationStatus is compatible with + MutableMapping + :rtype: ~keyvault.models.SecurityDomainOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models3.SecurityDomainOperationStatus] = kwargs.pop("cls", None) + + _request = build_security_domain_get_download_status_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models3.SecurityDomainOperationStatus, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _download_initial( + self, certificate_info_object: Union[_keyvault_models3.CertificateInfo, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(certificate_info_object, (IOBase, bytes)): + _content = certificate_info_object + else: + _content = json.dumps(certificate_info_object, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_security_domain_download_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket 
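+                # Draining the body here releases the underlying connection before
+                # the HttpResponseError below is raised; StreamConsumedError and
+                # StreamClosedError simply mean the body was already consumed or
+                # the stream is closed, so they are safe to ignore at this point.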
+ except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def _begin_download( + self, + certificate_info_object: _keyvault_models3.CertificateInfo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: ... + @overload + def _begin_download( + self, certificate_info_object: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: ... + @overload + def _begin_download( + self, certificate_info_object: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: ... + + @distributed_trace + def _begin_download( + self, certificate_info_object: Union[_keyvault_models3.CertificateInfo, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[None]: + """Retrieves the Security Domain from the managed HSM. Calling this endpoint can be used to + activate a provisioned managed HSM resource. + + :param certificate_info_object: The Security Domain download operation requires customer to + provide N certificates (minimum 3 and maximum 10) containing a public key in JWK format. Is one + of the following types: CertificateInfo, JSON, IO[bytes] Required. 
+ :type certificate_info_object: ~keyvault.models.CertificateInfo or JSON or IO[bytes] + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._download_initial( + certificate_info_object=certificate_info_object, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def get_upload_status(self, **kwargs: Any) -> _keyvault_models3.SecurityDomainOperationStatus: + """Get Security Domain upload operation status. + + :return: SecurityDomainOperationStatus. 
The SecurityDomainOperationStatus is compatible with + MutableMapping + :rtype: ~keyvault.models.SecurityDomainOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models3.SecurityDomainOperationStatus] = kwargs.pop("cls", None) + + _request = build_security_domain_get_upload_status_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models3.SecurityDomainOperationStatus, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _upload_initial( + self, security_domain: Union[_keyvault_models3.SecurityDomain, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(security_domain, (IOBase, bytes)): + _content = security_domain + else: + _content = json.dumps(security_domain, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_security_domain_upload_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def _begin_upload( + self, + security_domain: _keyvault_models3.SecurityDomain, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_keyvault_models3.SecurityDomainOperationStatus]: ... + @overload + def _begin_upload( + self, security_domain: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[_keyvault_models3.SecurityDomainOperationStatus]: ... + @overload + def _begin_upload( + self, security_domain: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[_keyvault_models3.SecurityDomainOperationStatus]: ... + + @distributed_trace + def _begin_upload( + self, security_domain: Union[_keyvault_models3.SecurityDomain, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[_keyvault_models3.SecurityDomainOperationStatus]: + """Restore the provided Security Domain. + + :param security_domain: The Security Domain to be restored. Is one of the following types: + SecurityDomain, JSON, IO[bytes] Required. + :type security_domain: ~keyvault.models.SecurityDomain or JSON or IO[bytes] + :return: An instance of LROPoller that returns SecurityDomainOperationStatus. 
The
+         SecurityDomainOperationStatus is compatible with MutableMapping
+        :rtype: ~azure.core.polling.LROPoller[~keyvault.models.SecurityDomainOperationStatus]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_keyvault_models3.SecurityDomainOperationStatus] = kwargs.pop("cls", None)
+        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+        if cont_token is None:
+            raw_result = self._upload_initial(
+                security_domain=security_domain,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+            raw_result.http_response.read()  # type: ignore
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers["Azure-AsyncOperation"] = self._deserialize(
+                "str", response.headers.get("Azure-AsyncOperation")
+            )
+            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+            deserialized = _deserialize(_keyvault_models3.SecurityDomainOperationStatus, response.json())
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+            return deserialized
+
+        path_format_arguments = {
+            "vaultBaseUrl": self._serialize.url(
+                "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True
+            ),
+        }
+
+        if polling is True:
+            polling_method: PollingMethod = cast(
+                PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+            )
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller[_keyvault_models3.SecurityDomainOperationStatus].from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller[_keyvault_models3.SecurityDomainOperationStatus](
+            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
+        )
+
+    @distributed_trace
+    def get_transfer_key(self, **kwargs: Any) -> _keyvault_models3.TransferKey:
+        """Retrieve Security Domain transfer key.
+
+        :return: TransferKey.
The TransferKey is compatible with MutableMapping + :rtype: ~keyvault.models.TransferKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models3.TransferKey] = kwargs.pop("cls", None) + + _request = build_security_domain_get_transfer_key_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models3.TransferKey, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/model_base.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/model_base.py new file mode 100644 index 000000000000..49d5c7259389 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/model_base.py @@ -0,0 +1,1232 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
+
+def _is_readonly(p):
+    try:
+        return p._visibility == ["read"]
+    except AttributeError:
+        return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.exclude_readonly = exclude_readonly
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + 
:rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. 
+ :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = 
prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", 
{}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: 
typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
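+    # Editor's sketch (illustrative comment, not generated code): because
+    # _sorted_annotations above moves str/float/int/bool to the end of the
+    # union, more specific deserializers get the first try, e.g. for
+    # typing.Union[datetime, str]:
+    #
+    #     _deserialize(typing.Union[datetime, str], "2022-08-26T18:38:00Z")
+    #     # -> a datetime.datetime in UTC, falling back to str only on parse failure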
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering: make `str` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if deserializer is bool:
+                return value.text == "true" if value.text else None
+        if deserializer is None:
+            return value
+        if deserializer in [int, float, bool]:
+            return deserializer(value)
+        if isinstance(deserializer, CaseInsensitiveEnumMeta):
+            try:
+                return deserializer(value)
+            except ValueError:
+                # for unknown value, return raw value
+                return value
+        if isinstance(deserializer, type) and issubclass(deserializer, Model):
+            return deserializer._deserialize(value, [])
+        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
+    except Exception as e:
+        raise DeserializationError() from e
+
+
+def _deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    if isinstance(value, PipelineResponse):
+        value = value.http_response.json()
+    if rf is None and format:
+        rf = _RestField(format=format)
+    if not isinstance(deserializer, functools.partial):
+        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
+    return _deserialize_with_callable(deserializer, value)
+
+
+def _failsafe_deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"]
= None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: 
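+    # Editor's sketch (assumed usage, mirroring real generated models): a
+    # polymorphic hierarchy typically combines rest_discriminator with the
+    # discriminator keyword handled by Model.__init_subclass__, e.g.:
+    #
+    #     class Fish(Model):
+    #         __mapping__: typing.Dict[str, Model] = {}
+    #         kind: str = rest_discriminator(name="kind")
+    #
+    #     class Shark(Fish, discriminator="shark"):
+    #         kind: typing.Literal["shark"] = rest_discriminator(name="kind")  # type: ignore
+    #
+    # so that Model._deserialize routes {"kind": "shark", ...} to Shark.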
+ return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: 
typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/serialization.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+    Dict,
+    Any,
+    cast,
+    Optional,
+    Union,
+    AnyStr,
+    IO,
+    Mapping,
+    Callable,
+    MutableMapping,
+    List,
+)
+
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate  # type: ignore
+from typing_extensions import Self
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+    # Accept "text" because we're open minded people...
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+        """Decode data according to content-type.
+
+        Accepts a stream of data as well, but it will be loaded at once into memory for now.
+
+        If no content-type is provided, the string version (not bytes, not stream) is returned.
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+        :return: The deserialized data.
+        :rtype: object
+        """
+        if hasattr(data, "read"):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding="utf-8-sig")
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+        # Remove Byte Order Mark if present in string
+        data_as_str = data_as_str.lstrip(_BOM)
+
+        if content_type is None:
+            return data
+
+        if cls.JSON_REGEXP.match(content_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
+        elif "xml" in (content_type or []):
+            try:
+
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
+                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
+                except NameError:
+                    pass
+
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError as err:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up with exception
+                # context otherwise.
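+                # Editor's note (illustrative, not part of the generated file):
+                # this fallback means a server bug such as
+                #
+                #     RawDeserializer.deserialize_from_text(b'{"code": "Throttled"}', "application/xml")
+                #
+                # still returns {"code": "Throttled"} rather than raising for bad XML.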
+                _LOGGER.critical("Wasn't XML nor JSON, failing")
+                raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT use any requests/aiohttp or whatever
+        specific implementation.
+        Headers will be tested for "content-type"
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    :rtype: str
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    :rtype: list
+    """
+    keys = _FLATTEN.split(attr_desc["key"])
+    return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    :rtype: str
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node.
+
+    :param str tag: The tag name
+    :param str prefix: The prefix
+    :param str ns: The namespace
+    :returns: The XML node
+    :rtype: xml.etree.ElementTree.Element
+    """
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+class Model:
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+    def __ne__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
+        return not self.__eq__(other)
+
+    def __str__(self) -> str:
+        return str(self.__dict__)
+
+    @classmethod
+    def enable_additional_properties_sending(cls) -> None:
+        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+    @classmethod
+    def is_xml_model(cls) -> bool:
+        try:
+            cls._xml_map  # type: ignore
+        except AttributeError:
+            return False
+        return True
+
+    @classmethod
+    def _create_xml_node(cls):
+        """Create XML node.
+
+        :returns: The XML node
+        :rtype: xml.etree.ElementTree.Element
+        """
+        try:
+            xml_map = cls._xml_map  # type: ignore
+        except AttributeError:
+            xml_map = {}
+
+        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+        """Return the JSON that would be sent to server from this model.
+
+        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+        If you want XML serialization, you can pass the kwarg is_xml=True.
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, keep_readonly=keep_readonly, **kwargs
+        )
+
+    def as_dict(
+        self,
+        keep_readonly: bool = True,
+        key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+        **kwargs: Any
+    ) -> JSON:
+        """Return a dict that can be serialized using json.dump.
+
+        Advanced usage might optionally use a callback as parameter:
+
+        .. code:: python
+
+            def my_key_transformer(key, attr_desc, value):
+                return key
+
+        Key is the attribute name used in Python. Attr_desc
+        is a dict of metadata. Currently contains 'type' with the
+        msrest type and 'key' with the RestAPI encoded key.
+        Value is the current value in this object.
+
+        The string returned will be used to serialize the key.
+        If the return type is a list, this is considered hierarchical
+        result dict.
+
+        See the three examples in this file:
+
+        - attribute_transformer
+        - full_restapi_key_transformer
+        - last_restapi_key_transformer
+
+        If you want XML serialization, you can pass the kwarg is_xml=True.
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :param function key_transformer: A key transformer function.
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+        )
+
+    @classmethod
+    def _infer_class_models(cls):
+        try:
+            str_models = cls.__module__.rsplit(".", 1)[0]
+            models = sys.modules[str_models]
+            client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+            if cls.__name__ not in client_models:
+                raise ValueError("Not Autorest generated code")
+        except Exception:  # pylint: disable=broad-exception-caught
+            # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+            client_models = {cls.__name__: cls}
+        return client_models
+
+    @classmethod
+    def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
+        """Parse a str using the RestAPI syntax and return a model.
+
+        :param str data: A str using RestAPI structure. JSON by default.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises DeserializationError: if something went wrong
+        :rtype: Self
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def from_dict(
+        cls,
+        data: Any,
+        key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+        content_type: Optional[str] = None,
+    ) -> Self:
+        """Parse a dict using a given key extractor and return a model.
+
+        By default, considers the key
+        extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
+        and last_rest_key_case_insensitive_extractor)
+
+        :param dict data: A dict using RestAPI structure
+        :param function key_extractors: A key extractor function.
+        :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+        :rtype: str, int, float, bool, dict, list
+        """
+        if data is None:
+            raise ValueError("No value for given attribute")
+
+        try:
+            if data is CoreNull:
+                return None
+            if data_type in self.basic_types.values():
+                return self.serialize_basic(data, data_type, **kwargs)
+
+            if data_type in self.serialize_type:
+                return self.serialize_type[data_type](data, **kwargs)
+
+            # If dependencies is empty, try with current data class
+            # It has to be a subclass of Enum anyway
+            enum_type = self.dependencies.get(data_type, data.__class__)
+            if issubclass(enum_type, Enum):
+                return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.serialize_type:
+                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+        except (ValueError, TypeError) as err:
+            msg = "Unable to serialize value: {!r} as type: {!r}."
+            raise SerializationError(msg.format(data, data_type)) from err
+        return self._serialize(data, **kwargs)
+
+    @classmethod
+    def _get_custom_serializers(cls, data_type, **kwargs):  # pylint: disable=inconsistent-return-statements
+        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+        if custom_serializer:
+            return custom_serializer
+        if kwargs.get("is_xml", False):
+            return cls._xml_basic_types_serializers.get(data_type)
+
+    @classmethod
+    def serialize_basic(cls, data, data_type, **kwargs):
+        """Serialize basic builtin data types.
+        Serializes objects to str, int, float or bool.
+
+        Possible kwargs:
+        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+        - is_xml bool : If set, use xml_basic_types_serializers
+
+        :param obj data: Object to be serialized.
+        :param str data_type: Type of object in the iterable.
+        :rtype: str, int, float, bool
+        :return: serialized object
+        """
+        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+        if custom_serializer:
+            return custom_serializer(data)
+        if data_type == "str":
+            return cls.serialize_unicode(data)
+        return eval(data_type)(data)  # nosec # pylint: disable=eval-used
+
+    @classmethod
+    def serialize_unicode(cls, data):
+        """Special handling for serializing unicode strings in Py2.
+        Encode to UTF-8 if unicode, otherwise handle as a str.
+
+        :param str data: Object to be serialized.
+        :rtype: str
+        :return: serialized object
+        """
+        try:  # If I received an enum, return its value
+            return data.value
+        except AttributeError:
+            pass
+
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                # Don't change it, JSON and XML ElementTree are totally able
+                # to serialize correctly u'' strings
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    def serialize_iter(self, data, iter_type, div=None, **kwargs):
+        """Serialize iterable.
+
+        Supported kwargs:
+        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+          serialization_ctxt['type'] should be same as data_type.
+        - is_xml bool : If set, serialize as XML
+
+        :param list data: Object to be serialized.
+        :param str iter_type: Type of object in the iterable.
+        :param str div: If set, this str will be used to combine the elements
+         in the iterable into a combined string. Default is None.
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
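+
+        Example (illustrative)::
+
+            Serializer.serialize_time(datetime.time(11, 2, 3))  # -> "11:02:03"
+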
+        :rtype: str
+        :return: serialized time
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_time(attr)
+        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+        if attr.microsecond:
+            t += ".{:06}".format(attr.microsecond)  # zero-pad to six digits (e.g. 50 microseconds -> .000050)
+        return t
+
+    @staticmethod
+    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize TimeDelta object into ISO-8601 formatted string.
+
+        :param TimeDelta attr: Object to be serialized.
+        :rtype: str
+        :return: serialized duration
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises TypeError: if format invalid.
+        :return: serialized rfc
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises SerializationError: if format invalid.
+        :return: serialized iso
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise SerializationError(msg) from err
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise TypeError(msg) from err
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises SerializationError: if format invalid
+        :return: serialized unix
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        # Need the cast, as for some reason "split" is typed as list[str | Any]
+        dict_keys = cast(List[str], _FLATTEN.split(key))
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties under it are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
+    attr, attr_desc, data
+):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties under it are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case-insensitive version of "last_rest_key_extractor".
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract correct XML name with namespace.
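+
+    For example (illustrative), a model class whose ``_xml_map`` is
+    ``{"name": "Apple", "ns": "http://example.com"}`` yields the qualified name
+    ``"{http://example.com}Apple"``.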
+
+    :param type internal_type: A model type
+    :rtype: str
+    :returns: The XML node name, qualified with its namespace if one is defined
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for child nodes
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        # Iter and wrapped, should have found one node only (the wrap one)
+        if len(children) != 1:
+            raise DeserializationError(
+                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
+                    xml_name
+                )
+            )
+        return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an iter type; we should have found one element only, or none
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' where it was not expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer:
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
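+
+    Example (illustrative; ``MyModel`` and ``response`` are placeholders for a
+    hypothetical model class and a pipeline response)::
+
+        deserializer = Deserializer(classes={"MyModel": MyModel})
+        result = deserializer("MyModel", response)
+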
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+        :return: Deserialized dictionary.
+        :rtype: dict
+        """
+        if isinstance(attr, list):
+            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+        if isinstance(attr, ET.Element):
+            # Transform value into {"Key": "value"}
+            attr = {el.tag: el.text for el in attr}
+        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+    def deserialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
+        """Deserialize a generic object.
+        This will be handled as a dictionary.
+
+        :param dict attr: Dictionary to be deserialized.
+        :return: Deserialized object.
+        :rtype: dict
+        :raises TypeError: if non-builtin datatype encountered.
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            # Do not recurse on XML, just return the tree as-is
+            return attr
+        if isinstance(attr, str):
+            return self.deserialize_basic(attr, "str")
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.deserialize_basic(attr, self.basic_types[obj_type])
+        if obj_type is _long_type:
+            return self.deserialize_long(attr)
+
+        if obj_type == dict:
+            deserialized = {}
+            for key, value in attr.items():
+                try:
+                    deserialized[key] = self.deserialize_object(value, **kwargs)
+                except ValueError:
+                    deserialized[key] = None
+            return deserialized
+
+        if obj_type == list:
+            deserialized = []
+            for obj in attr:
+                try:
+                    deserialized.append(self.deserialize_object(obj, **kwargs))
+                except ValueError:
+                    pass
+            return deserialized
+
+        error = "Cannot deserialize generic object with type: "
+        raise TypeError(error + str(obj_type))
+
+    def deserialize_basic(self, attr, data_type):  # pylint: disable=too-many-return-statements
+        """Deserialize basic builtin data type from string.
+        Will attempt to convert to str, int, float and bool.
+        This function will also accept '1', '0', 'true' and 'false' as
+        valid bool values.
+
+        :param str attr: response string to be deserialized.
+        :param str data_type: deserialization data type.
+        :return: Deserialized basic type.
+        :rtype: str, int, float or bool
+        :raises TypeError: if string format is not valid.
+        """
+        # If we're here, data is supposed to be a basic type.
+        # If it's still an XML node, take the text
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if not attr:
+            if data_type == "str":
+                # None or '', node is empty string.
+                return ""
+            # None or '', node with a strong type is None.
+            # Don't try to model "empty bool" or "empty int"
+            return None
+
+        if data_type == "bool":
+            if attr in [True, False, 1, 0]:
+                return bool(attr)
+            if isinstance(attr, str):
+                if attr.lower() in ["true", "1"]:
+                    return True
+                if attr.lower() in ["false", "0"]:
+                    return False
+            raise TypeError("Invalid boolean value: {}".format(attr))
+
+        if data_type == "str":
+            return self.deserialize_unicode(attr)
+        return eval(data_type)(attr)  # nosec # pylint: disable=eval-used
+
+    @staticmethod
+    def deserialize_unicode(data):
+        """Preserve unicode objects in Python 2, otherwise return data
+        as a string.
+
+        :param str data: response string to be deserialized.
+        :return: Deserialized string.
+        :rtype: str or unicode
+        """
+        # We might be here because we have an enum modeled as string,
+        # and we try to deserialize a partial dict with enum inside
+        if isinstance(data, Enum):
+            return data
+
+        # Consider this a real string
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    @staticmethod
+    def deserialize_enum(data, enum_obj):
+        """Deserialize string into enum object.
+
+        If the string is not a valid enum value it will be returned as-is
+        and a warning will be logged.
+
+        :param str data: Response string to be deserialized. If this value is
+         None or invalid it will be returned as-is.
+        :param Enum enum_obj: Enum object to deserialize to.
+        :return: Deserialized enum object.
+        :rtype: Enum
+        """
+        if isinstance(data, enum_obj) or data is None:
+            return data
+        if isinstance(data, Enum):
+            data = data.value
+        if isinstance(data, int):
+            # Workaround. We might consider removing it in the future.
+            try:
+                return list(enum_obj.__members__.values())[data]
+            except IndexError as exc:
+                error = "{!r} is not a valid index for enum {!r}"
+                raise DeserializationError(error.format(data, enum_obj)) from exc
+        try:
+            return enum_obj(str(data))
+        except ValueError:
+            for enum_value in enum_obj:
+                if enum_value.value.lower() == str(data).lower():
+                    return enum_value
+            # We don't fail anymore for unknown value, we deserialize as a string
+            _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+            return Deserializer.deserialize_unicode(data)
+
+    @staticmethod
+    def deserialize_bytearray(attr):
+        """Deserialize string into bytearray.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized bytearray
+        :rtype: bytearray
+        :raises TypeError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return bytearray(b64decode(attr))  # type: ignore
+
+    @staticmethod
+    def deserialize_base64(attr):
+        """Deserialize URL-safe base64 encoded string into bytes.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized bytes
+        :rtype: bytes
+        :raises TypeError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
+        attr = attr + padding  # type: ignore
+        encoded = attr.replace("-", "+").replace("_", "/")
+        return b64decode(encoded)
+
+    @staticmethod
+    def deserialize_decimal(attr):
+        """Deserialize string into Decimal object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized decimal
+        :raises DeserializationError: if string format invalid.
+        :rtype: decimal
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            return decimal.Decimal(str(attr))  # type: ignore
+        except decimal.DecimalException as err:
+            msg = "Invalid decimal {}".format(attr)
+            raise DeserializationError(msg) from err
+
+    @staticmethod
+    def deserialize_long(attr):
+        """Deserialize string into long (Py2) or int (Py3).
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized int
+        :rtype: long or int
+        :raises ValueError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return _long_type(attr)  # type: ignore
+
+    @staticmethod
+    def deserialize_duration(attr):
+        """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+        :param str attr: response string to be deserialized.
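+
+        Example (illustrative)::
+
+            Deserializer.deserialize_duration("PT1H30M")  # -> datetime.timedelta(seconds=5400)
+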
+        :return: Deserialized duration
+        :rtype: TimeDelta
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Time must have only digits and separators. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize unix timestamp into Datetime object.
+        The timestamp is represented as seconds since the epoch.
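+
+        Example (illustrative)::
+
+            Deserializer.deserialize_unix(1577836800)
+            # -> datetime.datetime(2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)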
+
+        :param int attr: Unix timestamp, in seconds, to be deserialized.
+        :return: Deserialized datetime
+        :rtype: Datetime
+        :raises DeserializationError: if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)  # type: ignore
+        try:
+            attr = int(attr)
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/utils.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/utils.py
new file mode 100644
index 000000000000..35c9c836f85f
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_utils/utils.py
@@ -0,0 +1,25 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from abc import ABC
+from typing import Generic, TYPE_CHECKING, TypeVar
+
+if TYPE_CHECKING:
+    from .serialization import Deserializer, Serializer
+
+
+TClient = TypeVar("TClient")
+TConfig = TypeVar("TConfig")
+
+
+class ClientMixinABC(ABC, Generic[TClient, TConfig]):
+    """DO NOT use this class. It is for internal typing use only."""
+
+    _client: TClient
+    _config: TConfig
+    _serialize: "Serializer"
+    _deserialize: "Deserializer"
diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_version.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_version.py
new file mode 100644
index 000000000000..be71c81bd282
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "1.0.0b1"
diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/__init__.py
new file mode 100644
index 000000000000..a2c929f88bc6
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/__init__.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import SecurityDomainClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "SecurityDomainClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_client.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_client.py new file mode 100644 index 000000000000..af44f6e7df9b --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_client.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import SecurityDomainClientConfiguration +from ._operations._operations import _SecurityDomainClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class SecurityDomainClient(_SecurityDomainClientOperationsMixin): + """SecurityDomainClient. + + :param vault_base_url: Required. + :type vault_base_url: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is "7.5". Note + that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, vault_base_url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{vaultBaseUrl}" + self._config = SecurityDomainClientConfiguration(vault_base_url=vault_base_url, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_configuration.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_configuration.py new file mode 100644 index 000000000000..bd16d340efbe --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_configuration.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class SecurityDomainClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for SecurityDomainClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param vault_base_url: Required. + :type vault_base_url: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is "7.5". Note + that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, vault_base_url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "7.5") + + if vault_base_url is None: + raise ValueError("Parameter 'vault_base_url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.vault_base_url = vault_base_url + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://vault.azure.net/.default"]) + kwargs.setdefault("sdk_moniker", "keyvault-securitydomain/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ._patch import *  # pylint: disable=unused-wildcard-import
+
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = []
+__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
+_patch_sdk()
diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_operations.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_operations.py
new file mode 100644
index 000000000000..79ed060d4393
--- /dev/null
+++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_operations.py
@@ -0,0 +1,539 @@
+# pylint: disable=line-too-long,useless-suppression
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from collections.abc import MutableMapping
+from io import IOBase
+import json
+from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+
+from azure.core import AsyncPipelineClient
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    StreamClosedError,
+    StreamConsumedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ....keyvault import models as _keyvault_models4
+from ..._operations._operations import (
+    build_security_domain_download_request,
+    build_security_domain_get_download_status_request,
+    build_security_domain_get_transfer_key_request,
+    build_security_domain_get_upload_status_request,
+    build_security_domain_upload_request,
+)
+from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
+from ..._utils.utils import ClientMixinABC
+from .._configuration import SecurityDomainClientConfiguration
+
+JSON = MutableMapping[str, Any]
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class _SecurityDomainClientOperationsMixin(
+    ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], SecurityDomainClientConfiguration]
+):
+
+    @distributed_trace_async
+    async def get_download_status(self, **kwargs: Any) -> _keyvault_models4.SecurityDomainOperationStatus:
+        """Retrieves the Security Domain download operation status.
+
+        :return: SecurityDomainOperationStatus.
The SecurityDomainOperationStatus is compatible with + MutableMapping + :rtype: ~keyvault.models.SecurityDomainOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models4.SecurityDomainOperationStatus] = kwargs.pop("cls", None) + + _request = build_security_domain_get_download_status_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models4.SecurityDomainOperationStatus, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _download_initial( + self, certificate_info_object: Union[_keyvault_models4.CertificateInfo, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(certificate_info_object, (IOBase, bytes)): + _content = certificate_info_object + else: + _content = json.dumps(certificate_info_object, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_security_domain_download_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: 
+ await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def _begin_download( + self, + certificate_info_object: _keyvault_models4.CertificateInfo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: ... + @overload + async def _begin_download( + self, certificate_info_object: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: ... + @overload + async def _begin_download( + self, certificate_info_object: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: ... + + @distributed_trace_async + async def _begin_download( + self, certificate_info_object: Union[_keyvault_models4.CertificateInfo, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Retrieves the Security Domain from the managed HSM. Calling this endpoint can be used to + activate a provisioned managed HSM resource. + + :param certificate_info_object: The Security Domain download operation requires customer to + provide N certificates (minimum 3 and maximum 10) containing a public key in JWK format. Is one + of the following types: CertificateInfo, JSON, IO[bytes] Required. 
+ :type certificate_info_object: ~keyvault.models.CertificateInfo or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._download_initial( + certificate_info_object=certificate_info_object, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace_async + async def get_upload_status(self, **kwargs: Any) -> _keyvault_models4.SecurityDomainOperationStatus: + """Get Security Domain upload operation status. + + :return: SecurityDomainOperationStatus. 
The SecurityDomainOperationStatus is compatible with + MutableMapping + :rtype: ~keyvault.models.SecurityDomainOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models4.SecurityDomainOperationStatus] = kwargs.pop("cls", None) + + _request = build_security_domain_get_upload_status_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models4.SecurityDomainOperationStatus, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _upload_initial( + self, security_domain: Union[_keyvault_models4.SecurityDomain, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(security_domain, (IOBase, bytes)): + _content = security_domain + else: + _content = json.dumps(security_domain, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_security_domain_upload_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the 
body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def _begin_upload( + self, + security_domain: _keyvault_models4.SecurityDomain, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_keyvault_models4.SecurityDomainOperationStatus]: ... + @overload + async def _begin_upload( + self, security_domain: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[_keyvault_models4.SecurityDomainOperationStatus]: ... + @overload + async def _begin_upload( + self, security_domain: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[_keyvault_models4.SecurityDomainOperationStatus]: ... + + @distributed_trace_async + async def _begin_upload( + self, security_domain: Union[_keyvault_models4.SecurityDomain, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[_keyvault_models4.SecurityDomainOperationStatus]: + """Restore the provided Security Domain. + + :param security_domain: The Security Domain to be restored. Is one of the following types: + SecurityDomain, JSON, IO[bytes] Required. + :type security_domain: ~keyvault.models.SecurityDomain or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns SecurityDomainOperationStatus. 
The + SecurityDomainOperationStatus is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~keyvault.models.SecurityDomainOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_keyvault_models4.SecurityDomainOperationStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._upload_initial( + security_domain=security_domain, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = _deserialize(_keyvault_models4.SecurityDomainOperationStatus, response.json()) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_keyvault_models2.SecurityDomainOperationStatus].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_keyvault_models2.SecurityDomainOperationStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace_async + async def get_transfer_key(self, **kwargs: Any) -> _keyvault_models4.TransferKey: + """Retrieve Security Domain transfer key. + + :return: TransferKey. 
The TransferKey is compatible with MutableMapping + :rtype: ~keyvault.models.TransferKey + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_keyvault_models4.TransferKey] = kwargs.pop("cls", None) + + _request = build_security_domain_get_transfer_key_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "vaultBaseUrl": self._serialize.url( + "self._config.vault_base_url", self._config.vault_base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.KeyVaultError, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_keyvault_models4.TransferKey, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/__init__.py new file mode 100644 index 000000000000..deaef15370f8 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + KeyVaultError, + KeyVaultErrorError, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "KeyVaultError", + "KeyVaultErrorError", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_models.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_models.py new file mode 100644 index 000000000000..e2d87d9b39c1 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_models.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional, TYPE_CHECKING + +from .._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. import models as _models + + +class KeyVaultError(_Model): + """The key vault error exception. + + :ivar error: The key vault server error. + :vartype error: ~clientcustomizations.models.KeyVaultErrorError + """ + + error: Optional["_models.KeyVaultErrorError"] = rest_field(visibility=["read"]) + """The key vault server error.""" + + +class KeyVaultErrorError(_Model): + """KeyVaultErrorError. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. 
+ :vartype message: str + :ivar inner_error: The key vault server error. + :vartype inner_error: ~clientcustomizations.models.KeyVaultErrorError + """ + + code: Optional[str] = rest_field(visibility=["read"]) + """The error code.""" + message: Optional[str] = rest_field(visibility=["read"]) + """The error message.""" + inner_error: Optional["_models.KeyVaultErrorError"] = rest_field(name="innererror", visibility=["read"]) + """The key vault server error.""" diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/py.typed b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/clientcustomizations/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/keyvault/azure-keyvault-securitydomain/keyvault/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/__init__.py b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/__init__.py new file mode 100644 index 000000000000..4963e33e8ff9 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/__init__.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
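The read-only `KeyVaultError`/`KeyVaultErrorError` models above are what the security-domain operations earlier in this patch attach to exceptions via `raise HttpResponseError(response=response, model=error)`. A sketch of unwrapping the nested `innererror` chain from a caught exception; the triggering call is illustrative:

from azure.core.exceptions import HttpResponseError


async def explain_failure(client) -> None:
    try:
        await client.get_transfer_key()
    except HttpResponseError as exc:
        err = exc.model.error if exc.model else None  # KeyVaultError.error
        while err is not None:  # walk the innererror chain
            print(f"{err.code}: {err.message}")
            err = err.inner_error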
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + CertificateInfo, + SecurityDomain, + SecurityDomainJsonWebKey, + SecurityDomainOperationStatus, + TransferKey, +) + +from ._enums import ( # type: ignore + OperationStatus, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "CertificateInfo", + "SecurityDomain", + "SecurityDomainJsonWebKey", + "SecurityDomainOperationStatus", + "TransferKey", + "OperationStatus", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_enums.py b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_enums.py new file mode 100644 index 000000000000..9fb7878c20a4 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_enums.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class OperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Operation status.""" + + SUCCESS = "Success" + """The operation succeeded.""" + IN_PROGRESS = "InProgress" + """The operation is in progress.""" + FAILED = "Failed" + """The operation failed.""" diff --git a/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_models.py b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_models.py new file mode 100644 index 000000000000..f5b8bb928745 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_models.py @@ -0,0 +1,230 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union, overload + +from ...clientcustomizations._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. import models as _models + + +class CertificateInfo(_Model): + """The Security Domain download operation requires customer to provide N certificates (minimum 3 + and maximum 10) containing a public key in JWK format. + + :ivar certificates: Certificates needed from customer. Required. + :vartype certificates: list[~keyvault.models.SecurityDomainJsonWebKey] + :ivar required: Customer to specify the number of certificates (minimum 2 and maximum 10) to + restore Security Domain. 
+      restore Security Domain.
+ :vartype required: int + """ + + certificates: List["_models.SecurityDomainJsonWebKey"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Certificates needed from customer. Required.""" + required: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Customer to specify the number of certificates (minimum 2 and maximum 10) to restore Security + Domain.""" + + @overload + def __init__( + self, + *, + certificates: List["_models.SecurityDomainJsonWebKey"], + required: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SecurityDomain(_Model): + """The Security Domain. + + :ivar value: The Security Domain. Required. + :vartype value: str + """ + + value: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Security Domain. Required.""" + + @overload + def __init__( + self, + *, + value: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SecurityDomainJsonWebKey(_Model): + """A JSON Web Key (JWK) for use in a security domain operation. + + :ivar kid: Key identifier. Required. + :vartype kid: str + :ivar kty: JsonWebKey Key Type (kty), as defined in + `https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40 + `_. For Security Domain + this value must be RSA. Required. + :vartype kty: str + :ivar key_ops: Supported key operations. Required. + :vartype key_ops: list[str] + :ivar n: RSA modulus. Required. + :vartype n: str + :ivar e: RSA public exponent. Required. + :vartype e: str + :ivar x5_c: X509 certificate chain parameter. Required. + :vartype x5_c: list[str] + :ivar use: Public Key Use Parameter. This is optional and if present must be enc. + :vartype use: str + :ivar x5_t: X509 certificate SHA1 thumbprint. This is optional. + :vartype x5_t: str + :ivar x5_t_s256: X509 certificate SHA256 thumbprint. Required. + :vartype x5_t_s256: str + :ivar alg: Algorithm intended for use with the key. Required. + :vartype alg: str + """ + + kid: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Key identifier. Required.""" + kty: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """JsonWebKey Key Type (kty), as defined in + `https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40 + `_. For Security Domain + this value must be RSA. Required.""" + key_ops: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Supported key operations. Required.""" + n: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """RSA modulus. Required.""" + e: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """RSA public exponent. Required.""" + x5_c: List[str] = rest_field(name="x5c", visibility=["read", "create", "update", "delete", "query"]) + """X509 certificate chain parameter. Required.""" + use: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Public Key Use Parameter. 
This is optional and if present must be enc.""" + x5_t: Optional[str] = rest_field(name="x5t", visibility=["read", "create", "update", "delete", "query"]) + """X509 certificate SHA1 thumbprint. This is optional.""" + x5_t_s256: str = rest_field(name="x5t#S256", visibility=["read", "create", "update", "delete", "query"]) + """X509 certificate SHA256 thumbprint. Required.""" + alg: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Algorithm intended for use with the key. Required.""" + + @overload + def __init__( + self, + *, + kid: str, + kty: str, + key_ops: List[str], + n: str, + e: str, + x5_c: List[str], + x5_t_s256: str, + alg: str, + use: Optional[str] = None, + x5_t: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SecurityDomainOperationStatus(_Model): + """The Security Domain operation status. + + :ivar status: Operation status. Known values are: "Success", "InProgress", and "Failed". + :vartype status: str or ~keyvault.models.OperationStatus + :ivar status_details: Details of the operation status. + :vartype status_details: str + """ + + status: Optional[Union[str, "_models.OperationStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Operation status. Known values are: \"Success\", \"InProgress\", and \"Failed\".""" + status_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Details of the operation status.""" + + @overload + def __init__( + self, + *, + status: Optional[Union[str, "_models.OperationStatus"]] = None, + status_details: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TransferKey(_Model): + """Security Domain transfer key. + + :ivar key_format: Specifies the format of the transfer key. + :vartype key_format: str + :ivar transfer_key: Specifies the transfer key in JWK format. Required. + :vartype transfer_key: ~keyvault.models.SecurityDomainJsonWebKey + """ + + key_format: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies the format of the transfer key.""" + transfer_key: "_models.SecurityDomainJsonWebKey" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the transfer key in JWK format. Required.""" + + @overload + def __init__( + self, + *, + transfer_key: "_models.SecurityDomainJsonWebKey", + key_format: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
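These models are keyword-constructed `Model` subclasses, so a download request body can be assembled directly. A hedged sketch using the package-relative `keyvault.models` namespace laid out in this patch; all key material is placeholder data, and the `alg`/`key_ops` values are assumptions (the field docs above only require `kty` to be RSA and `use`, when present, to be `enc`):

from keyvault.models import CertificateInfo, SecurityDomainJsonWebKey


def make_jwk(kid: str) -> SecurityDomainJsonWebKey:
    # Placeholder values; real ones come from the customer's RSA certificates.
    return SecurityDomainJsonWebKey(
        kid=kid,
        kty="RSA",  # must be RSA per the field docs above
        key_ops=["encrypt"],  # assumed
        n="<base64url-modulus>",
        e="AQAB",
        x5_c=["<base64-DER-certificate>"],
        x5_t_s256="<sha256-thumbprint>",
        alg="RSA-OAEP-256",  # assumed
        use="enc",  # optional; must be "enc" when present
    )


info = CertificateInfo(certificates=[make_jwk(f"cert-{i}") for i in range(3)], required=2)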
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_patch.py b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/keyvault/azure-keyvault-securitydomain/keyvault/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/keyvault/azure-keyvault-securitydomain/setup.py b/sdk/keyvault/azure-keyvault-securitydomain/setup.py index 227484eaaeda..5b26df274344 100644 --- a/sdk/keyvault/azure-keyvault-securitydomain/setup.py +++ b/sdk/keyvault/azure-keyvault-securitydomain/setup.py @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-keyvault-securitydomain" PACKAGE_PPRINT_NAME = "Azure Keyvault Securitydomain" +PACKAGE_NAMESPACE = "clientcustomizations" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -46,25 +47,21 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", ], zip_safe=False, packages=find_packages( exclude=[ "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.keyvault", ] ), include_package_data=True, package_data={ - "azure.keyvault.securitydomain": ["py.typed"], + "clientcustomizations": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", - "azure-core>=1.31.0", + "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], python_requires=">=3.9", diff --git a/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in b/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in index 6ebb7e55a108..65d89afd85c0 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in +++ b/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in @@ -1,7 +1,5 @@ include *.md include LICENSE -include azure/developer/loadtesting/py.typed +include customizations/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/__init__.py -include azure/developer/__init__.py \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/_metadata.json b/sdk/loadtesting/azure-developer-loadtesting/_metadata.json new file mode 100644 index 000000000000..ddac30e27dab --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": 
"2024-12-01-preview" +} \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json b/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json new file mode 100644 index 000000000000..9b0f07e76da7 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json @@ -0,0 +1,133 @@ +{ + "CrossLanguagePackageId": "Microsoft.LoadTestService", + "CrossLanguageDefinitionId": { + "microsoft.loadtestservice.models.AppComponent": "Microsoft.LoadTestService.AppComponent", + "microsoft.loadtestservice.models.ArtifactsContainerInfo": "Microsoft.LoadTestService.ArtifactsContainerInfo", + "microsoft.loadtestservice.models.AutoStopCriteria": "Microsoft.LoadTestService.AutoStopCriteria", + "microsoft.loadtestservice.models.CertificateMetadata": "Microsoft.LoadTestService.CertificateMetadata", + "microsoft.loadtestservice.models.DimensionFilter": "Microsoft.LoadTestService.DimensionFilter", + "microsoft.loadtestservice.models.DimensionValue": "Microsoft.LoadTestService.DimensionValue", + "microsoft.loadtestservice.models.ErrorDetails": "Microsoft.LoadTestService.ErrorDetails", + "microsoft.loadtestservice.models.FunctionFlexConsumptionResourceConfiguration": "Microsoft.LoadTestService.FunctionFlexConsumptionResourceConfiguration", + "microsoft.loadtestservice.models.TargetResourceConfigurations": "Microsoft.LoadTestService.TargetResourceConfigurations", + "microsoft.loadtestservice.models.FunctionFlexConsumptionTargetResourceConfigurations": "Microsoft.LoadTestService.FunctionFlexConsumptionTargetResourceConfigurations", + "microsoft.loadtestservice.models.LoadTestConfiguration": "Microsoft.LoadTestService.LoadTestConfiguration", + "microsoft.loadtestservice.models.MetricAvailability": "Microsoft.LoadTestService.MetricAvailability", + "microsoft.loadtestservice.models.MetricDefinition": "Microsoft.LoadTestService.MetricDefinition", + "microsoft.loadtestservice.models.MetricDefinitionCollection": "Microsoft.LoadTestService.MetricDefinitionCollection", + "microsoft.loadtestservice.models.MetricNamespace": "Microsoft.LoadTestService.MetricNamespace", + "microsoft.loadtestservice.models.MetricNamespaceCollection": "Microsoft.LoadTestService.MetricNamespaceCollection", + "microsoft.loadtestservice.models.MetricRequestPayload": "Microsoft.LoadTestService.MetricRequestPayload", + "microsoft.loadtestservice.models.MetricValue": "Microsoft.LoadTestService.MetricValue", + "microsoft.loadtestservice.models.NameAndDescription": "Microsoft.LoadTestService.NameAndDescription", + "microsoft.loadtestservice.models.OptionalLoadTestConfiguration": "Microsoft.LoadTestService.OptionalLoadTestConfiguration", + "microsoft.loadtestservice.models.PassFailCriteria": "Microsoft.LoadTestService.PassFailCriteria", + "microsoft.loadtestservice.models.PassFailMetric": "Microsoft.LoadTestService.PassFailMetric", + "microsoft.loadtestservice.models.PassFailServerMetric": "Microsoft.LoadTestService.PassFailServerMetric", + "microsoft.loadtestservice.models.RegionalConfiguration": "Microsoft.LoadTestService.RegionalConfiguration", + "microsoft.loadtestservice.models.ResourceMetric": "Microsoft.LoadTestService.ResourceMetric", + "microsoft.loadtestservice.models.Secret": "Microsoft.LoadTestService.Secret", + "microsoft.loadtestservice.models.Test": "Microsoft.LoadTestService.Test", + "microsoft.loadtestservice.models.TestAppComponents": "Microsoft.LoadTestService.TestAppComponents", + "microsoft.loadtestservice.models.TestFileInfo": 
"Microsoft.LoadTestService.TestFileInfo", + "microsoft.loadtestservice.models.TestInputArtifacts": "Microsoft.LoadTestService.TestInputArtifacts", + "microsoft.loadtestservice.models.TestProfile": "Microsoft.LoadTestService.TestProfile", + "microsoft.loadtestservice.models.TestProfileRun": "Microsoft.LoadTestService.TestProfileRun", + "microsoft.loadtestservice.models.TestProfileRunRecommendation": "Microsoft.LoadTestService.TestProfileRunRecommendation", + "microsoft.loadtestservice.models.TestRun": "Microsoft.LoadTestService.TestRun", + "microsoft.loadtestservice.models.TestRunAppComponents": "Microsoft.LoadTestService.TestRunAppComponents", + "microsoft.loadtestservice.models.TestRunArtifacts": "Microsoft.LoadTestService.TestRunArtifacts", + "microsoft.loadtestservice.models.TestRunDetail": "Microsoft.LoadTestService.TestRunDetail", + "microsoft.loadtestservice.models.TestRunFileInfo": "Microsoft.LoadTestService.TestRunFileInfo", + "microsoft.loadtestservice.models.TestRunInputArtifacts": "Microsoft.LoadTestService.TestRunInputArtifacts", + "microsoft.loadtestservice.models.TestRunOutputArtifacts": "Microsoft.LoadTestService.TestRunOutputArtifacts", + "microsoft.loadtestservice.models.TestRunServerMetricsConfiguration": "Microsoft.LoadTestService.TestRunServerMetricsConfiguration", + "microsoft.loadtestservice.models.TestRunStatistics": "Microsoft.LoadTestService.TestRunStatistics", + "microsoft.loadtestservice.models.TestServerMetricsConfiguration": "Microsoft.LoadTestService.TestServerMetricsConfiguration", + "microsoft.loadtestservice.models.TimeSeriesElement": "Microsoft.LoadTestService.TimeSeriesElement", + "customizations.models.PFMetrics": "Microsoft.LoadTestService.PFMetrics", + "customizations.models.PassFailAggregationFunction": "Microsoft.LoadTestService.PassFailAggregationFunction", + "customizations.models.PassFailAction": "Microsoft.LoadTestService.PassFailAction", + "customizations.models.PassFailResult": "Microsoft.LoadTestService.PassFailResult", + "customizations.models.SecretType": "Microsoft.LoadTestService.SecretType", + "customizations.models.CertificateType": "Microsoft.LoadTestService.CertificateType", + "customizations.models.FileType": "Microsoft.LoadTestService.FileType", + "customizations.models.FileValidationStatus": "Microsoft.LoadTestService.FileValidationStatus", + "customizations.models.TestKind": "Microsoft.LoadTestService.TestKind", + "customizations.models.ManagedIdentityType": "Microsoft.LoadTestService.ManagedIdentityType", + "customizations.models.ResourceKind": "Microsoft.LoadTestService.ResourceKind", + "customizations.models.PassFailTestResult": "Microsoft.LoadTestService.PassFailTestResult", + "customizations.models.TestRunStatus": "Microsoft.LoadTestService.TestRunStatus", + "customizations.models.RequestDataLevel": "Microsoft.LoadTestService.RequestDataLevel", + "customizations.models.CreatedByType": "Microsoft.LoadTestService.CreatedByType", + "customizations.models.TimeGrain": "Microsoft.LoadTestService.TimeGrain", + "customizations.models.Aggregation": "Microsoft.LoadTestService.Aggregation", + "customizations.models.MetricUnit": "Microsoft.LoadTestService.MetricUnit", + "customizations.models.TestProfileRunStatus": "Microsoft.LoadTestService.TestProfileRunStatus", + "customizations.models.RecommendationCategory": "Microsoft.LoadTestService.RecommendationCategory", + "customizations.LoadTestAdministrationClient.create_or_update_test": "Customizations.AdministrationOperations.createOrUpdateTest", + 
"customizations.aio.LoadTestAdministrationClient.create_or_update_test": "Customizations.AdministrationOperations.createOrUpdateTest", + "customizations.LoadTestAdministrationClient.create_or_update_app_components": "Customizations.AdministrationOperations.createOrUpdateAppComponents", + "customizations.aio.LoadTestAdministrationClient.create_or_update_app_components": "Customizations.AdministrationOperations.createOrUpdateAppComponents", + "customizations.LoadTestAdministrationClient.create_or_update_server_metrics_config": "Customizations.AdministrationOperations.createOrUpdateServerMetricsConfig", + "customizations.aio.LoadTestAdministrationClient.create_or_update_server_metrics_config": "Customizations.AdministrationOperations.createOrUpdateServerMetricsConfig", + "customizations.LoadTestAdministrationClient.get_app_components": "Customizations.AdministrationOperations.getAppComponents", + "customizations.aio.LoadTestAdministrationClient.get_app_components": "Customizations.AdministrationOperations.getAppComponents", + "customizations.LoadTestAdministrationClient.get_server_metrics_config": "Customizations.AdministrationOperations.getServerMetricsConfig", + "customizations.aio.LoadTestAdministrationClient.get_server_metrics_config": "Customizations.AdministrationOperations.getServerMetricsConfig", + "customizations.LoadTestAdministrationClient.get_test": "Customizations.AdministrationOperations.getTest", + "customizations.aio.LoadTestAdministrationClient.get_test": "Customizations.AdministrationOperations.getTest", + "customizations.LoadTestAdministrationClient.get_test_file": "Customizations.AdministrationOperations.getTestFile", + "customizations.aio.LoadTestAdministrationClient.get_test_file": "Customizations.AdministrationOperations.getTestFile", + "customizations.LoadTestAdministrationClient.list_test_files": "Customizations.AdministrationOperations.listTestFiles", + "customizations.aio.LoadTestAdministrationClient.list_test_files": "Customizations.AdministrationOperations.listTestFiles", + "customizations.LoadTestAdministrationClient.list_tests": "Customizations.AdministrationOperations.listTests", + "customizations.aio.LoadTestAdministrationClient.list_tests": "Customizations.AdministrationOperations.listTests", + "customizations.LoadTestAdministrationClient.delete_test_file": "Customizations.AdministrationOperations.deleteTestFile", + "customizations.aio.LoadTestAdministrationClient.delete_test_file": "Customizations.AdministrationOperations.deleteTestFile", + "customizations.LoadTestAdministrationClient.delete_test": "Customizations.AdministrationOperations.deleteTest", + "customizations.aio.LoadTestAdministrationClient.delete_test": "Customizations.AdministrationOperations.deleteTest", + "customizations.LoadTestAdministrationClient.create_or_update_test_profile": "Customizations.AdministrationOperations.createOrUpdateTestProfile", + "customizations.aio.LoadTestAdministrationClient.create_or_update_test_profile": "Customizations.AdministrationOperations.createOrUpdateTestProfile", + "customizations.LoadTestAdministrationClient.delete_test_profile": "Customizations.AdministrationOperations.deleteTestProfile", + "customizations.aio.LoadTestAdministrationClient.delete_test_profile": "Customizations.AdministrationOperations.deleteTestProfile", + "customizations.LoadTestAdministrationClient.get_test_profile": "Customizations.AdministrationOperations.getTestProfile", + "customizations.aio.LoadTestAdministrationClient.get_test_profile": 
"Customizations.AdministrationOperations.getTestProfile", + "customizations.LoadTestAdministrationClient.list_test_profiles": "Customizations.AdministrationOperations.listTestProfiles", + "customizations.aio.LoadTestAdministrationClient.list_test_profiles": "Customizations.AdministrationOperations.listTestProfiles", + "customizations.LoadTestRunClient.create_or_update_app_components": "Customizations.TestRunOperations.createOrUpdateAppComponents", + "customizations.aio.LoadTestRunClient.create_or_update_app_components": "Customizations.TestRunOperations.createOrUpdateAppComponents", + "customizations.LoadTestRunClient.create_or_update_server_metrics_config": "Customizations.TestRunOperations.createOrUpdateServerMetricsConfig", + "customizations.aio.LoadTestRunClient.create_or_update_server_metrics_config": "Customizations.TestRunOperations.createOrUpdateServerMetricsConfig", + "customizations.LoadTestRunClient.delete_test_run": "Customizations.TestRunOperations.deleteTestRun", + "customizations.aio.LoadTestRunClient.delete_test_run": "Customizations.TestRunOperations.deleteTestRun", + "customizations.LoadTestRunClient.get_app_components": "Customizations.TestRunOperations.getAppComponents", + "customizations.aio.LoadTestRunClient.get_app_components": "Customizations.TestRunOperations.getAppComponents", + "customizations.LoadTestRunClient.get_server_metrics_config": "Customizations.TestRunOperations.getServerMetricsConfig", + "customizations.aio.LoadTestRunClient.get_server_metrics_config": "Customizations.TestRunOperations.getServerMetricsConfig", + "customizations.LoadTestRunClient.get_test_run": "Customizations.TestRunOperations.getTestRun", + "customizations.aio.LoadTestRunClient.get_test_run": "Customizations.TestRunOperations.getTestRun", + "customizations.LoadTestRunClient.get_test_run_file": "Customizations.TestRunOperations.getTestRunFile", + "customizations.aio.LoadTestRunClient.get_test_run_file": "Customizations.TestRunOperations.getTestRunFile", + "customizations.LoadTestRunClient.list_metric_dimension_values": "Customizations.TestRunOperations.listMetricDimensionValues", + "customizations.aio.LoadTestRunClient.list_metric_dimension_values": "Customizations.TestRunOperations.listMetricDimensionValues", + "customizations.LoadTestRunClient.get_metric_definitions": "Customizations.TestRunOperations.listMetricDefinitions", + "customizations.aio.LoadTestRunClient.get_metric_definitions": "Customizations.TestRunOperations.listMetricDefinitions", + "customizations.LoadTestRunClient.get_metric_namespaces": "Customizations.TestRunOperations.listMetricNamespaces", + "customizations.aio.LoadTestRunClient.get_metric_namespaces": "Customizations.TestRunOperations.listMetricNamespaces", + "customizations.LoadTestRunClient.list_metrics": "Customizations.TestRunOperations.listMetrics", + "customizations.aio.LoadTestRunClient.list_metrics": "Customizations.TestRunOperations.listMetrics", + "customizations.LoadTestRunClient.list_test_runs": "Customizations.TestRunOperations.listTestRuns", + "customizations.aio.LoadTestRunClient.list_test_runs": "Customizations.TestRunOperations.listTestRuns", + "customizations.LoadTestRunClient.stop_test_run": "Customizations.TestRunOperations.stop", + "customizations.aio.LoadTestRunClient.stop_test_run": "Customizations.TestRunOperations.stop", + "customizations.LoadTestRunClient.delete_test_profile_run": "Customizations.TestRunOperations.deleteTestProfileRun", + "customizations.aio.LoadTestRunClient.delete_test_profile_run": 
"Customizations.TestRunOperations.deleteTestProfileRun", + "customizations.LoadTestRunClient.get_test_profile_run": "Customizations.TestRunOperations.getTestProfileRun", + "customizations.aio.LoadTestRunClient.get_test_profile_run": "Customizations.TestRunOperations.getTestProfileRun", + "customizations.LoadTestRunClient.list_test_profile_runs": "Customizations.TestRunOperations.listTestProfileRuns", + "customizations.aio.LoadTestRunClient.list_test_profile_runs": "Customizations.TestRunOperations.listTestProfileRuns", + "customizations.LoadTestRunClient.stop_test_profile_run": "Customizations.TestRunOperations.stopTestProfileRun", + "customizations.aio.LoadTestRunClient.stop_test_profile_run": "Customizations.TestRunOperations.stopTestProfileRun" + } +} \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py new file mode 100644 index 000000000000..31d5b087f062 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import LoadTestAdministrationClient # type: ignore +from ._client import LoadTestRunClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClient", + "LoadTestRunClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py new file mode 100644 index 000000000000..584dd6c9a223 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py @@ -0,0 +1,176 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration +from ._operations._operations import _LoadTestAdministrationClientOperationsMixin, _LoadTestRunClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class LoadTestAdministrationClient(_LoadTestAdministrationClientOperationsMixin): + """LoadTestAdministrationClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestAdministrationClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
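`send_request` is the raw escape hatch past the generated operations: the client formats `{endpoint}` into the URL and runs the request through the pipeline assembled above, returning the response without error mapping. A sketch; the request path is illustrative, not taken from this patch:

from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential

from customizations import LoadTestAdministrationClient

# endpoint is host-only; the client prepends "https://" via _endpoint above.
client = LoadTestAdministrationClient(
    endpoint="<resource>.<region>.cnt-prod.loadtesting.azure.com",
    credential=DefaultAzureCredential(),
)
request = HttpRequest("GET", "/tests?api-version=2024-12-01-preview")  # illustrative route
response = client.send_request(request)
print(response.status_code)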
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) + + +class LoadTestRunClient(_LoadTestRunClientOperationsMixin): + """LoadTestRunClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestRunClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py new file mode 100644 index 000000000000..adf093acd20f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class LoadTestAdministrationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long + """Configuration for LoadTestAdministrationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
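+ The configured value is passed through to every request builder and sent as the
+ ``api-version`` query parameter.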
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) + + +class LoadTestRunClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for LoadTestRunClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
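+ The configured value is passed through to every request builder and sent as the
+ ``api-version`` query parameter.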
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py new file mode 100644 index 000000000000..d31644f5785d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py @@ -0,0 +1,4505 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +import datetime +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from ...microsoft.loadtestservice import models as _microsoft_loadtestservice_models4 +from .._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC +from .._validation import api_version_validation + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_load_test_administration_create_or_update_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_app_components_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/app-components" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_server_metrics_config_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/server-metrics-config" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_app_components_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/app-components" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_server_metrics_config_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/server-metrics-config" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_load_test_administration_get_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_test_file_request( # pylint: disable=name-too-long + test_id: str, file_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_list_test_files_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_list_tests_request( # pylint: disable=name-too-long + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests" + + # 
Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") + if search is not None: + _params["search"] = _SERIALIZER.query("search", search, "str") + if last_modified_start_time is not None: + _params["lastModifiedStartTime"] = _SERIALIZER.query( + "last_modified_start_time", last_modified_start_time, "iso-8601" + ) + if last_modified_end_time is not None: + _params["lastModifiedEndTime"] = _SERIALIZER.query("last_modified_end_time", last_modified_end_time, "iso-8601") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_begin_upload_test_file_request( # pylint: disable=name-too-long + test_id: str, + file_name: str, + *, + file_type: Optional[Union[str, _microsoft_loadtestservice_models4.FileType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if file_type is not None: + _params["fileType"] = _SERIALIZER.query("file_type", file_type, "str") + + # Construct headers + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_delete_test_file_request( # pylint: disable=name-too-long + test_id: str, file_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_delete_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_delete_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
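# api-version falls back to "2024-12-01-preview" here unless the caller supplies one.
+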
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_list_test_profiles_request( # pylint: disable=name-too-long + *, + maxpagesize: Optional[int] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if last_modified_start_time is not None: + _params["lastModifiedStartTime"] = _SERIALIZER.query( + "last_modified_start_time", last_modified_start_time, "iso-8601" + ) + if last_modified_end_time is not None: + _params["lastModifiedEndTime"] = _SERIALIZER.query("last_modified_end_time", last_modified_end_time, "iso-8601") + if test_profile_ids is not None: + _params["testProfileIds"] = _SERIALIZER.query("test_profile_ids", test_profile_ids, "[str]", div=",") + if test_ids is not None: + _params["testIds"] = _SERIALIZER.query("test_ids", test_ids, "[str]", div=",") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_begin_test_run_request( # pylint: disable=name-too-long + test_run_id: str, *, old_test_run_id: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if old_test_run_id is not None: + _params["oldTestRunId"] = _SERIALIZER.query("old_test_run_id", old_test_run_id, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_create_or_update_app_components_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/app-components" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_create_or_update_server_metrics_config_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/server-metrics-config" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_delete_test_run_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_app_components_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/app-components" + path_format_arguments = { 
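+ # Maps the test_run_id argument onto the {testRunId} placeholder in the URL template.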
+ "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_server_metrics_config_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/server-metrics-config" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_run_request(test_run_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_run_file_request( # pylint: disable=name-too-long + test_run_id: str, file_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/files/{fileName}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_metric_dimension_values_request( # pylint: disable=name-too-long + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: 
Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-dimensions/{name}/values" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["metricname"] = _SERIALIZER.query("metric_name", metric_name, "str") + if interval is not None: + _params["interval"] = _SERIALIZER.query("interval", interval, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + _params["timespan"] = _SERIALIZER.query("time_interval", time_interval, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_metric_definitions_request( # pylint: disable=name-too-long + test_run_id: str, *, metric_namespace: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-definitions" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_metric_namespaces_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-namespaces" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_metrics_request( + test_run_id: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = 
None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metrics" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if aggregation is not None: + _params["aggregation"] = _SERIALIZER.query("aggregation", aggregation, "str") + _params["metricname"] = _SERIALIZER.query("metric_name", metric_name, "str") + if interval is not None: + _params["interval"] = _SERIALIZER.query("interval", interval, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + _params["timespan"] = _SERIALIZER.query("time_interval", time_interval, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_test_runs_request( # pylint: disable=name-too-long + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] = None, + execution_to: Optional[datetime.datetime] = None, + status: Optional[str] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") + if search is not None: + _params["search"] = _SERIALIZER.query("search", search, "str") + if test_id is not None: + _params["testId"] = _SERIALIZER.query("test_id", test_id, "str") + if execution_from is not None: + _params["executionFrom"] = _SERIALIZER.query("execution_from", execution_from, "iso-8601") + if execution_to is not None: + _params["executionTo"] = _SERIALIZER.query("execution_to", execution_to, "iso-8601") + if status is not None: + _params["status"] = _SERIALIZER.query("status", status, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_stop_test_run_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + 
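# Stopping a run is a POST to /test-runs/{testRunId}:stop; the request carries no body.
+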
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}:stop" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_begin_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_delete_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), 
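+ # {testProfileRunId} is the only path parameter for this operation.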
+ } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_test_profile_runs_request( # pylint: disable=name-too-long + *, + maxpagesize: Optional[int] = None, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if min_start_date_time is not None: + _params["minStartDateTime"] = _SERIALIZER.query("min_start_date_time", min_start_date_time, "iso-8601") + if max_start_date_time is not None: + _params["maxStartDateTime"] = _SERIALIZER.query("max_start_date_time", max_start_date_time, "iso-8601") + if min_end_date_time is not None: + _params["minEndDateTime"] = _SERIALIZER.query("min_end_date_time", min_end_date_time, "iso-8601") + if max_end_date_time is not None: + _params["maxEndDateTime"] = _SERIALIZER.query("max_end_date_time", max_end_date_time, "iso-8601") + if created_date_start_time is not None: + _params["createdDateStartTime"] = _SERIALIZER.query( + "created_date_start_time", created_date_start_time, "iso-8601" + ) + if created_date_end_time is not None: + _params["createdDateEndTime"] = _SERIALIZER.query("created_date_end_time", created_date_end_time, "iso-8601") + if test_profile_run_ids is not None: + _params["testProfileRunIds"] = _SERIALIZER.query("test_profile_run_ids", test_profile_run_ids, "[str]", div=",") + if test_profile_ids is not None: + _params["testProfileIds"] = _SERIALIZER.query("test_profile_ids", test_profile_ids, "[str]", div=",") + if statuses is not None: + _params["statuses"] = _SERIALIZER.query("statuses", statuses, "[str]", div=",") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_stop_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/test-profile-runs/{testProfileRunId}:stop" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class _LoadTestAdministrationClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], LoadTestAdministrationClientConfiguration] +): + + @overload + def create_or_update_test( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.Test, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: ~microsoft.loadtestservice.models.Test + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. 
The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_test( + self, test_id: str, body: Union[_microsoft_loadtestservice_models4.Test, JSON, IO[bytes]], **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Is one of the following types: Test, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.Test or JSON or IO[bytes] + :return: Test. The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.Test] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_app_components( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.TestAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. 
Required. + :type test_id: str + :param body: App Component model. Required. + :type body: ~microsoft.loadtestservice.models.TestAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_app_components( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models4.TestAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Is one of the following types: TestAppComponents, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestAppComponents or JSON or IO[bytes] + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_app_components_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_server_metrics_config( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_server_metrics_config( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Is one of the following types: + TestServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration or JSON or + IO[bytes] + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_server_metrics_config_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_app_components(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Get associated app component (collection of azure resources) for the given test. + + Get associated app component (collection of azure resources) for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_app_components_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_server_metrics_config( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """List server metrics configuration for the given test. + + List server metrics configuration for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_server_metrics_config_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.Test: + """Get load test details by test Id. + + Get load test details by test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: Test. 
The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.Test] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_file( + self, test_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestFileInfo: + """Get a test file by the file name for a test. + + Get a test file by the file name for a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestFileInfo.
The TestFileInfo is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_test_files( + self, test_id: str, **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TestFileInfo"]: + """Get all test files. + + Get all test files. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
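An illustrative sketch of the two read paths for test files, get_test_file above and the paged list_test_files documented here. It assumes the same hypothetical client object as the earlier sketch; the file name is a placeholder.

    # ItemPaged lazily follows nextLink; TestFileInfo is MutableMapping-compatible.
    for file_info in client.list_test_files("my-load-test"):
        print(file_info["fileName"], file_info.get("fileType"))

    jmx = client.get_test_file("my-load-test", "app.jmx")  # hypothetical file name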
+ :type test_id: str + :return: An iterator like instance of TestFileInfo + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TestFileInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_microsoft_loadtestservice_models4.TestFileInfo]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_files_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestFileInfo], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_tests( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.Test"]: + """Get all load tests by the fully qualified resource Id e.g. + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + + Get all load tests by the fully qualified resource Id e.g. + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format, e.g. + lastModifiedDateTime asc. Supported fields - lastModifiedDateTime. Default value is None.
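A sketch of server-side filtering with list_tests; the client and the date value are hypothetical, and only the keyword names come from the signature above. The returned ItemPaged follows nextLink transparently through the same prepare_request/get_next closures shown above for list_test_files.

    import datetime

    since = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)
    for test in client.list_tests(
        orderby="lastModifiedDateTime desc",
        last_modified_start_time=since,  # sent as an RFC 3339 timestamp
    ):
        print(test["testId"])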
+ :paramtype orderby: str + :keyword search: Prefix-based, case-sensitive search on searchable fields - displayName, + createdBy. For example, to search for a test whose display name is Login Test, + the search parameter can be Login. Default value is None. + :paramtype search: str + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter tests. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter tests. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :return: An iterator like instance of Test + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.Test] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.Test]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_tests_request( + orderby=orderby, + search=search, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models4.Test], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def _begin_upload_test_file( + self, + test_id: str, +
file_name: str, + body: bytes, + *, + file_type: Optional[Union[str, _microsoft_loadtestservice_models4.FileType]] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestFileInfo: + """Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. File + should be provided in the request body as application/octet-stream. + + Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. File + should be provided in the request body as application/octet-stream. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Unique name for test file with file extension like : App.jmx. Required. + :type file_name: str + :param body: The file content as application/octet-stream. Required. + :type body: bytes + :keyword file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". Default value + is None. + :paramtype file_type: str or ~microsoft.loadtestservice.models.FileType + :return: TestFileInfo. The TestFileInfo is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("content-type", "application/octet-stream")) + cls: ClsType[_microsoft_loadtestservice_models4.TestFileInfo] = kwargs.pop("cls", None) + + _content = body + + _request = build_load_test_administration_begin_upload_test_file_request( + test_id=test_id, + file_name=file_name, + file_type=file_type, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_test_file( # pylint: disable=inconsistent-return-statements + self, test_id: str, file_name: str, **kwargs: Any + ) -> None: + """Delete file by the file name for a test. 
+ + Delete file by the file name for a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def delete_test(self, test_id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete a test by its test Id. + + Delete a test by its test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
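An illustrative teardown for the two delete operations in this block. Both return None on HTTP 204, and error statuses pass through error_map, so a missing resource surfaces as an azure.core exception; names below are hypothetical.

    from azure.core.exceptions import ResourceNotFoundError

    try:
        client.delete_test_file("my-load-test", "app.jmx")  # hypothetical names
        client.delete_test("my-load-test")
    except ResourceNotFoundError:
        pass  # already deleted; 404 maps to ResourceNotFoundError via error_map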
+ :type test_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def create_or_update_test_profile( + self, + test_profile_id: str, + body: _microsoft_loadtestservice_models4.TestProfile, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: ~microsoft.loadtestservice.models.TestProfile + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test_profile( + self, test_profile_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test_profile( + self, + test_profile_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "content_type", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def create_or_update_test_profile( + self, + test_profile_id: str, + body: Union[_microsoft_loadtestservice_models4.TestProfile, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Is one of the following types: TestProfile, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.TestProfile or JSON or IO[bytes] + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestProfile] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_profile_request( + test_profile_id=test_profile_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def delete_test_profile( # pylint: disable=inconsistent-return-statements + self, test_profile_id: str, **kwargs: Any + ) -> None: + """Delete a test profile. + + Delete a test profile by its test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
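A sketch of the test-profile round trip. The api_version_validation decorators in this block mean these operations exist only on the listed preview API versions, so the client must be configured with one of them; the ids and body keys below are placeholders, not authoritative.

    # Requires an api_version from the api_versions_list above,
    # e.g. "2024-12-01-preview".
    profile = client.create_or_update_test_profile(
        "my-profile",
        {
            "displayName": "Scaling profile",
            "testId": "my-load-test",
            "targetResourceId": "<arm-resource-id>",  # placeholder value
        },
    )
    client.delete_test_profile("my-profile")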
+ :type test_profile_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def get_test_profile(self, test_profile_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestProfile: + """Get load test profile details. + + Get load test profile details by test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfile] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "last_modified_start_time", + "last_modified_end_time", + "test_profile_ids", + "test_ids", + "accept", + ] + }, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def list_test_profiles( + self, + *, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TestProfile"]: + """List test profiles. + + Get all test profiles for the given filters. + + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter test profiles. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter test profiles. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :keyword test_profile_ids: Comma separated list of IDs of the test profiles to filter. Default + value is None. + :paramtype test_profile_ids: list[str] + :keyword test_ids: Comma separated list of IDs of the tests which should be associated with the + test profiles to fetch. Default value is None.
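An illustrative filtered listing; per the keyword documentation here, the list-typed arguments are transmitted to the service as comma-separated values. The client and ids remain hypothetical.

    for profile in client.list_test_profiles(test_ids=["my-load-test"]):
        print(profile["testProfileId"])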
+ :paramtype test_ids: list[str] + :return: An iterator like instance of TestProfile + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TestProfile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.TestProfile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_profiles_request( + maxpagesize=maxpagesize, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + test_profile_ids=test_profile_ids, + test_ids=test_ids, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestProfile], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class _LoadTestRunClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], LoadTestRunClientConfiguration] +): + + @overload + def _begin_test_run( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRun, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... + @overload + def _begin_test_run( + self, + test_run_id: str, + body: JSON, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... 
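The overloads around this point all funnel into one dispatch implementation. A simplified sketch of the shared body-handling rule follows; the generated code additionally serializes with SdkJSONEncoder and exclude_readonly=True, which this sketch omits.

    import json
    from io import IOBase
    from typing import IO, Union

    def to_request_content(body: Union[dict, bytes, IO[bytes]]) -> Union[str, bytes, IO[bytes]]:
        # bytes and file-like bodies pass through untouched; models and dicts
        # are JSON-serialized before being attached as the request content.
        if isinstance(body, (IOBase, bytes)):
            return body
        return json.dumps(body)

This is why each operation accepts a typed model, a plain JSON dict, or raw IO[bytes] interchangeably.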
+ @overload + def _begin_test_run( + self, + test_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... + + @distributed_trace + def _begin_test_run( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRun, JSON, IO[bytes]], + *, + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: + """Create and start a new test run with the given test run Id. + + Create and start a new test run with the given test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: The resource instance. Is one of the following types: TestRun, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.TestRun or JSON or IO[bytes] + :keyword old_test_run_id: Existing test run identifier that should be rerun, if this is + provided, the + test will run with the JMX file, configuration and app components from the + existing test run. You can override the configuration values for new test run + in the request body. Default value is None. + :paramtype old_test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_run_request( + test_run_id=test_run_id, + old_test_run_id=old_test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # 
type: ignore + + @overload + def create_or_update_app_components( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRunAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: ~microsoft.loadtestservice.models.TestRunAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_app_components( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRunAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. 
+ + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Is one of the following types: TestRunAppComponents, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestRunAppComponents or JSON or IO[bytes] + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRunAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_app_components_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". 
+ :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Is one of the following types: + TestRunServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration or JSON or + IO[bytes] + :return: TestRunServerMetricsConfiguration. 
The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_server_metrics_config_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_test_run( # pylint: disable=inconsistent-return-statements + self, test_run_id: str, **kwargs: Any + ) -> None: + """Delete an existing load test run. + + Delete an existing load test run by providing the testRunId. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str
+ :return: None
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_load_test_run_delete_test_run_request(
+ test_run_id=test_run_id,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ @distributed_trace
+ def get_app_components(
+ self, test_run_id: str, **kwargs: Any
+ ) -> _microsoft_loadtestservice_models4.TestRunAppComponents:
+ """Get associated app component (collection of Azure resources) for the given test
+ run.
+
+ Get associated app component (collection of Azure resources) for the given test
+ run.
+
+ :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic,
+ numeric, underscore or hyphen characters. Required.
+ :type test_run_id: str
+ :return: TestRunAppComponents. 
The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_app_components_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_server_metrics_config( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Get associated server metrics configuration for the given test run. + + Get associated server metrics configuration for the given test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunServerMetricsConfiguration. 
The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_server_metrics_config_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestRun: + """Get test run details by test run Id. + + Get test run details by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. 
The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_run_file( + self, test_run_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunFileInfo: + """Get test run file by file name. + + Get test run file by file name. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestRunFileInfo. 
The TestRunFileInfo is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_file_request( + test_run_id=test_run_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_metric_dimension_values( + self, + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any + ) -> ItemPaged[str]: + """List the dimension values for the given metric dimension name. + + List the dimension values for the given metric dimension name. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param name: Dimension name. Required. + :type name: str + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :return: An iterator like instance of str + :rtype: ~azure.core.paging.ItemPaged[str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[str]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metric_dimension_values_request( + test_run_id=test_run_id, + name=name, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + interval=interval, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[str], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_metric_definitions( + self, test_run_id: str, *, metric_namespace: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.MetricDefinitionCollection: + """List the metric definitions for a load test run. + + List the metric definitions for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :return: MetricDefinitionCollection. 
The MetricDefinitionCollection is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.MetricDefinitionCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.MetricDefinitionCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_definitions_request( + test_run_id=test_run_id, + metric_namespace=metric_namespace, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.MetricDefinitionCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_metric_namespaces( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.MetricNamespaceCollection: + """List the metric namespaces for a load test run. + + List the metric namespaces for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: MetricNamespaceCollection. 
The MetricNamespaceCollection is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.MetricNamespaceCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.MetricNamespaceCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_namespaces_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.MetricNamespaceCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[_microsoft_loadtestservice_models4.MetricRequestPayload] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: ~microsoft.loadtestservice.models.MetricRequestPayload + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[JSON] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: JSON + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[IO[bytes]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_metrics( + self, + test_run_id: str, + body: Optional[Union[_microsoft_loadtestservice_models4.MetricRequestPayload, JSON, IO[bytes]]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Is one of the following types: MetricRequestPayload, + JSON, IO[bytes] Default value is None. + :type body: ~microsoft.loadtestservice.models.MetricRequestPayload or JSON or IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[List[_microsoft_loadtestservice_models4.TimeSeriesElement]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metrics_request( + test_run_id=test_run_id, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + aggregation=aggregation, + interval=interval, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TimeSeriesElement], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_test_runs( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] = None, + execution_to: Optional[datetime.datetime] = 
None,
+ status: Optional[str] = None,
+ **kwargs: Any
+ ) -> ItemPaged["_microsoft_loadtestservice_models4.TestRun"]:
+ """Get all test runs for the given filters.
+
+ Get all test runs for the given filters.
+
+ :keyword orderby: Sort on the supported fields in (field asc/desc) format, e.g.
+ executedDateTime asc. Supported fields - executedDateTime. Default value is None.
+ :paramtype orderby: str
+ :keyword search: Prefix-based, case-sensitive search on searchable fields - description,
+ executedUser. For example, to search for a test run with description 500 VUs,
+ the search parameter can be 500. Default value is None.
+ :paramtype search: str
+ :keyword test_id: Unique name of an existing load test. Default value is None.
+ :paramtype test_id: str
+ :keyword execution_from: Start DateTime(RFC 3339 literal format) of test-run execution time
+ filter range. Default value is None.
+ :paramtype execution_from: ~datetime.datetime
+ :keyword execution_to: End DateTime(RFC 3339 literal format) of test-run execution time filter
+ range. Default value is None.
+ :paramtype execution_to: ~datetime.datetime
+ :keyword status: Comma-separated list of test run statuses. Default value is None.
+ :paramtype status: str
+ :return: An iterator like instance of TestRun
+ :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TestRun]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ maxpagesize = kwargs.pop("maxpagesize", None)
+ cls: ClsType[List[_microsoft_loadtestservice_models4.TestRun]] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_load_test_run_list_test_runs_request(
+ orderby=orderby,
+ search=search,
+ test_id=test_id,
+ execution_from=execution_from,
+ execution_to=execution_to,
+ status=status,
+ maxpagesize=maxpagesize,
+ api_version=self._config.api_version,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = pipeline_response.http_response.json()
+ list_of_elem = _deserialize(List[_microsoft_loadtestservice_models4.TestRun], deserialized.get("value", []))
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.get("nextLink") or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ 
_stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def stop_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestRun: + """Stop test run by test run Id. + + Stop test run by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: _microsoft_loadtestservice_models4.TestProfileRun, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: ... + @overload + def _begin_test_profile_run( + self, test_profile_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: ... + @overload + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: ... 
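+
+ # Illustrative sketch (an assumption, not part of the generated surface):
+ # the overloads above are typing-only stubs, and the implementation below
+ # accepts TestProfileRun, JSON, or IO[bytes] bodies. A hypothetical internal
+ # call with a JSON body might look like:
+ #
+ # run = self._begin_test_profile_run(
+ # "sample-profile-run-id",
+ # {"testProfileId": "sample-profile-id", "displayName": "sketch run"},
+ # )
+ #
+ # Non-stream bodies are serialized with SdkJSONEncoder and sent with the
+ # default "application/merge-patch+json" content type.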
+ + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "content_type", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestProfileRun, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Create and start a new test profile run. + + Create and start a new test profile run with the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :param body: The resource instance. Is one of the following types: TestProfileRun, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestProfileRun or JSON or IO[bytes] + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def 
delete_test_profile_run( # pylint: disable=inconsistent-return-statements + self, test_profile_run_id: str, **kwargs: Any + ) -> None: + """Delete an existing load test profile run. + + Delete an existing load test profile run by providing the test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def get_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Get test profile run details. + + Get test profile run details by test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "min_start_date_time", + "max_start_date_time", + "min_end_date_time", + "max_end_date_time", + "created_date_start_time", + "created_date_end_time", + "test_profile_run_ids", + "test_profile_ids", + "statuses", + "accept", + ] + }, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def list_test_profile_runs( + self, + *, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any + ) -> ItemPaged["_microsoft_loadtestservice_models4.TestProfileRun"]: + """List test profile runs. + + Get all test profile runs for the given filters. + + :keyword min_start_date_time: Minimum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype min_start_date_time: ~datetime.datetime + :keyword max_start_date_time: Maximum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype max_start_date_time: ~datetime.datetime + :keyword min_end_date_time: Minimum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. 
+ :paramtype min_end_date_time: ~datetime.datetime + :keyword max_end_date_time: Maximum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype max_end_date_time: ~datetime.datetime + :keyword created_date_start_time: Start DateTime(RFC 3339 literal format) of the created time + range to filter test profile runs. Default value is None. + :paramtype created_date_start_time: ~datetime.datetime + :keyword created_date_end_time: End DateTime(RFC 3339 literal format) of the created time range + to filter test profile runs. Default value is None. + :paramtype created_date_end_time: ~datetime.datetime + :keyword test_profile_run_ids: Comma separated list of IDs of the test profile runs to filter. + Default value is None. + :paramtype test_profile_run_ids: list[str] + :keyword test_profile_ids: Comma separated IDs of the test profiles which should be associated + with the test profile runs to fetch. Default value is None. + :paramtype test_profile_ids: list[str] + :keyword statuses: Comma separated list of Statuses of the test profile runs to filter. Default + value is None. + :paramtype statuses: list[str] + :return: An iterator like instance of TestProfileRun + :rtype: ~azure.core.paging.ItemPaged[~microsoft.loadtestservice.models.TestProfileRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.TestProfileRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_profile_runs_request( + maxpagesize=maxpagesize, + min_start_date_time=min_start_date_time, + max_start_date_time=max_start_date_time, + min_end_date_time=min_end_date_time, + max_end_date_time=max_end_date_time, + created_date_start_time=created_date_start_time, + created_date_end_time=created_date_end_time, + test_profile_run_ids=test_profile_run_ids, + test_profile_ids=test_profile_ids, + statuses=statuses, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestProfileRun], 
deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def stop_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Stop test profile run. + + Stop test profile run for the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py @@ -0,0 +1,21 
@@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/model_base.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/model_base.py new file mode 100644 index 000000000000..49d5c7259389 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/model_base.py @@ -0,0 +1,1232 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access, broad-except
+
+import copy
+import calendar
+import decimal
+import functools
+import sys
+import logging
+import base64
+import re
+import typing
+import enum
+import email.utils
+from datetime import datetime, date, time, timedelta, timezone
+from json import JSONEncoder
+import xml.etree.ElementTree as ET
+from collections.abc import MutableMapping
+from typing_extensions import Self
+import isodate
+from azure.core.exceptions import DeserializationError
+from azure.core import CaseInsensitiveEnumMeta
+from azure.core.pipeline import PipelineResponse
+from azure.core.serialization import _Null
+
+_LOGGER = logging.getLogger(__name__)
+
+__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
+
+TZ_UTC = timezone.utc
+_T = typing.TypeVar("_T")
+
+
+def _timedelta_as_isostr(td: timedelta) -> str:
+    """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
+
+def _is_readonly(p):
+    try:
+        return p._visibility == ["read"]
+    except AttributeError:
+        return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A
JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.exclude_readonly = exclude_readonly
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds in the method above
+                pass
+            return super(SdkJSONEncoder, self).default(o)
+
+
+_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+_VALID_RFC7231 = re.compile(
+    r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
+    r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
+)
+
+
+def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize ISO-8601 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    attr = attr.upper()
+    match = _VALID_DATE.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    check_decimal = attr.split(".")
+    if len(check_decimal) > 1:
+        decimal_str = ""
+        for digit in check_decimal[1]:
+            if digit.isdigit():
+                decimal_str += digit
+            else:
+                break
+        if len(decimal_str) > 6:
+            attr = attr.replace(decimal_str, decimal_str[0:6])
+
+    date_obj = isodate.parse_datetime(attr)
+    test_utc = date_obj.utctimetuple()
+    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+        raise OverflowError("Hit max or min date")
+    return date_obj
+
+
+def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize RFC7231 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    match = _VALID_RFC7231.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    return email.utils.parsedate_to_datetime(attr)
+
+
+def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
+    """Deserialize unix timestamp into Datetime object.
+
+    :param float attr: response value (seconds since the epoch) to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    return datetime.fromtimestamp(attr, TZ_UTC)
+
+
+def _deserialize_date(attr: typing.Union[str, date]) -> date:
+    """Deserialize ISO-8601 formatted string into Date object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: date
+    :returns: The date object from that input
+    """
+    # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+    if isinstance(attr, date):
+        return attr
+    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)  # type: ignore
+
+
+def _deserialize_time(attr: typing.Union[str, time]) -> time:
+    """Deserialize ISO-8601 formatted string into time object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: datetime.time
+    :returns: The time object from that input
+    """
+    if isinstance(attr, time):
+        return attr
+    return isodate.parse_time(attr)
+
+
+def _deserialize_bytes(attr):
+    if isinstance(attr, (bytes, bytearray)):
+        return attr
+    return bytes(base64.b64decode(attr))
+
+
+def _deserialize_bytes_base64(attr):
+    if isinstance(attr, (bytes, bytearray)):
+        return attr
+    padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
+    attr = attr + padding  # type: ignore
+    encoded = attr.replace("-", "+").replace("_", "/")
+    return bytes(base64.b64decode(encoded))
+
+
+def _deserialize_duration(attr):
+    if isinstance(attr, timedelta):
+        return attr
+    return isodate.parse_duration(attr)
+
+
+def _deserialize_decimal(attr):
+    if isinstance(attr, decimal.Decimal):
+        return attr
+    return decimal.Decimal(str(attr))
+
+
+def _deserialize_int_as_str(attr):
+    if isinstance(attr, int):
+        return attr
+    return int(attr)
+
+
+_DESERIALIZE_MAPPING = {
+    datetime: _deserialize_datetime,
+    date: _deserialize_date,
+    time: _deserialize_time,
+    bytes: _deserialize_bytes,
+    bytearray: _deserialize_bytes,
+    timedelta: _deserialize_duration,
+    typing.Any: lambda x: x,
+    decimal.Decimal: _deserialize_decimal,
+}
+
+_DESERIALIZE_MAPPING_WITHFORMAT = {
+    "rfc3339": _deserialize_datetime,
+    "rfc7231": _deserialize_datetime_rfc7231,
+    "unix-timestamp": _deserialize_datetime_unix_timestamp,
+    "base64": _deserialize_bytes,
+    "base64url": _deserialize_bytes_base64,
+}
+
+
+def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
+    if annotation is int and rf and rf._format == "str":
+        return _deserialize_int_as_str
+    if rf and rf._format:
+        return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
+    return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+
+
+def _get_type_alias_type(module_name: str, alias_name: str):
+    types = {
+        k: v
+        for k, v in sys.modules[module_name].__dict__.items()
+        if isinstance(v, typing._GenericAlias)  # type: ignore
+    }
+    if alias_name not in types:
+        return alias_name
+    return types[alias_name]
+
+
+def _get_model(module_name: str, model_name: str):
+    models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
+    module_end = module_name.rsplit(".", 1)[0]
+    models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
+    if isinstance(model_name, str):
+        model_name = model_name.split(".")[-1]
+    if model_name not in models:
+        return model_name
+    return models[model_name]
+
+
+_UNSET = object()
+
+
+class _MyMutableMapping(MutableMapping[str, typing.Any]):
+    def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
+        self._data = data
+
+    def __contains__(self, key: typing.Any) -> bool:
+        return key in self._data
+
+    def __getitem__(self, key: str) -> typing.Any:
+        return self._data.__getitem__(key)
+
+    def __setitem__(self, key: str, value: typing.Any) -> None:
+        self._data.__setitem__(key, value)
+
+    def __delitem__(self, key: str) -> None:
+        self._data.__delitem__(key)
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return self._data.__iter__()
+
+    def __len__(self) -> int:
+        return self._data.__len__()
+
+    def __ne__(self, other: typing.Any) -> bool:
+        return not self.__eq__(other)
+
+    def keys(self) -> typing.KeysView[str]:
+        """
+        :returns: a set-like object providing a view on D's keys
+        :rtype: ~typing.KeysView
+        """
+        return self._data.keys()
+
+    def values(self) -> typing.ValuesView[typing.Any]:
+        """
+        :returns: an object providing a view on D's values
+        :rtype: ~typing.ValuesView
+        """
+        return self._data.values()
+
+    def items(self) -> typing.ItemsView[str, typing.Any]:
+        """
+        :returns: a set-like object providing a view on D's items
+        :rtype: ~typing.ItemsView
+        """
+        return self._data.items()
+
+    def get(self, key: str, default: typing.Any = None) -> typing.Any:
+        """
+        Get the value for key if key is in the dictionary, else default.
+        :param str key: The key to look up.
+        :param any default: The value to return if key is not in the dictionary. Defaults to None.
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    @typing.overload
+    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ
+
+    @typing.overload
+    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs
+
+    @typing.overload
+    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Removes the specified key and returns the corresponding value.
+        :param str key: The key to pop.
+        :param any default: The value to return if key is not in the dictionary.
+        :returns: The value corresponding to the key.
+        :rtype: any
+        :raises KeyError: If key is not found and default is not given.
+        """
+        if default is _UNSET:
+            return self._data.pop(key)
+        return self._data.pop(key, default)
+
+    def popitem(self) -> typing.Tuple[str, typing.Any]:
+        """
+        Removes and returns a (key, value) pair.
+        :returns: The (key, value) pair.
+        :rtype: tuple
+        :raises KeyError: if D is empty.
+        """
+        return self._data.popitem()
+
+    def clear(self) -> None:
+        """
+        Remove all items from D.
+        """
+        self._data.clear()
+
+    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
+        """
+        Updates D from mapping/iterable E and F.
+        :param any args: Either a mapping object or an iterable of key-value pairs.
+        """
+        self._data.update(*args, **kwargs)
+
+    @typing.overload
+    def setdefault(self, key: str, default: None = None) -> None: ...
+
+    @typing.overload
+    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Same as calling D.get(k, d), and setting D[k]=d if k is not found.
+        :param str key: The key to look up.
+        :param any default: The value to set if key is not in the dictionary.
+        :returns: D[k] if k in D, else d.
+ :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = 
prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", 
{}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: 
typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
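+    # e.g. for typing.Union[int, str], each member deserializer is tried in
+    # order; _sorted_annotations pushes primitive catch-alls such as str to
+    # the end so they only match as a last resort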
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering: make `str` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if deserializer is bool:
+                return value.text == "true" if value.text else None
+        if deserializer is None:
+            return value
+        if deserializer in [int, float, bool]:
+            return deserializer(value)
+        if isinstance(deserializer, CaseInsensitiveEnumMeta):
+            try:
+                return deserializer(value)
+            except ValueError:
+                # for unknown value, return raw value
+                return value
+        if isinstance(deserializer, type) and issubclass(deserializer, Model):
+            return deserializer._deserialize(value, [])
+        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
+    except Exception as e:
+        raise DeserializationError() from e
+
+
+def _deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    if isinstance(value, PipelineResponse):
+        value = value.http_response.json()
+    if rf is None and format:
+        rf = _RestField(format=format)
+    if not isinstance(deserializer, functools.partial):
+        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
+    return _deserialize_with_callable(deserializer, value)
+
+
+def _failsafe_deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"]
= None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: 
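+    # A discriminator is a rest_field flagged with is_discriminator=True:
+    # Model.__init_subclass__ registers each subclass in the base's __mapping__
+    # under its discriminator value, and Model._deserialize reads this field
+    # from incoming data to pick the concrete class.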
+ return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: 
typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/serialization.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+    Dict,
+    Any,
+    cast,
+    Optional,
+    Union,
+    AnyStr,
+    IO,
+    Mapping,
+    Callable,
+    MutableMapping,
+    List,
+)
+
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate  # type: ignore
+from typing_extensions import Self
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+    # Accept "text" because we're open minded people...
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+        """Decode data according to content-type.
+
+        Accepts a stream of data as well, but it will be loaded at once in memory for now.
+
+        If there is no content-type, the string version (not bytes, not a stream) is returned.
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+        :return: The deserialized data.
+        :rtype: object
+        """
+        if hasattr(data, "read"):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding="utf-8-sig")
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+        # Remove Byte Order Mark if present in string
+        data_as_str = data_as_str.lstrip(_BOM)
+
+        if content_type is None:
+            return data
+
+        if cls.JSON_REGEXP.match(content_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
+        elif "xml" in (content_type or []):
+            try:
+
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
+                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
+                except NameError:
+                    pass
+
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError as err:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up with exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML not JSON, failing")
+                raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT use any requests/aiohttp or whatever
+        specific implementation.
+        Headers will be tested for "content-type".
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    :rtype: str
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    :rtype: list
+    """
+    keys = _FLATTEN.split(attr_desc["key"])
+    return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    :rtype: str
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node.
+
+    :param str tag: The tag name
+    :param str prefix: The prefix
+    :param str ns: The namespace
+    :returns: The XML node
+    :rtype: xml.etree.ElementTree.Element
+    """
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+class Model:
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+    def __ne__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
+        return not self.__eq__(other)
+
+    def __str__(self) -> str:
+        return str(self.__dict__)
+
+    @classmethod
+    def enable_additional_properties_sending(cls) -> None:
+        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+    @classmethod
+    def is_xml_model(cls) -> bool:
+        try:
+            cls._xml_map  # type: ignore
+        except AttributeError:
+            return False
+        return True
+
+    @classmethod
+    def _create_xml_node(cls):
+        """Create XML node.
+
+        :returns: The XML node
+        :rtype: xml.etree.ElementTree.Element
+        """
+        try:
+            xml_map = cls._xml_map  # type: ignore
+        except AttributeError:
+            xml_map = {}
+
+        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+        """Return the JSON that would be sent to the server from this model.
+
+        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+        If you want XML serialization, you can pass the kwargs is_xml=True.
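+
+        For example, a minimal sketch (``PetModel`` is an illustrative stand-in
+        for any generated model class):
+
+        .. code:: python
+
+            body = PetModel(name="example").serialize()  # JSON-ready dict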
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, keep_readonly=keep_readonly, **kwargs
+        )
+
+    def as_dict(
+        self,
+        keep_readonly: bool = True,
+        key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+        **kwargs: Any
+    ) -> JSON:
+        """Return a dict that can be serialized using json.dump.
+
+        Advanced usage might optionally use a callback as a parameter:
+
+        .. code:: python
+
+            def my_key_transformer(key, attr_desc, value):
+                return key
+
+        Key is the attribute name used in Python. Attr_desc
+        is a dict of metadata. Currently contains 'type' with the
+        msrest type and 'key' with the RestAPI encoded key.
+        Value is the current value in this object.
+
+        The string returned will be used to serialize the key.
+        If the return type is a list, this is considered a hierarchical
+        result dict.
+
+        See the three examples in this file:
+
+        - attribute_transformer
+        - full_restapi_key_transformer
+        - last_restapi_key_transformer
+
+        If you want XML serialization, you can pass the kwargs is_xml=True.
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :param function key_transformer: A key transformer function.
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+        )
+
+    @classmethod
+    def _infer_class_models(cls):
+        try:
+            str_models = cls.__module__.rsplit(".", 1)[0]
+            models = sys.modules[str_models]
+            client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+            if cls.__name__ not in client_models:
+                raise ValueError("Not Autorest generated code")
+        except Exception:  # pylint: disable=broad-exception-caught
+            # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+            client_models = {cls.__name__: cls}
+        return client_models
+
+    @classmethod
+    def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
+        """Parse a str using the RestAPI syntax and return a model.
+
+        :param str data: A str using RestAPI structure. JSON by default.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises DeserializationError: if something went wrong
+        :rtype: Self
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def from_dict(
+        cls,
+        data: Any,
+        key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+        content_type: Optional[str] = None,
+    ) -> Self:
+        """Parse a dict using the given key extractor and return a model.
+
+        By default, considers the key extractors
+        (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
+        and last_rest_key_case_insensitive_extractor).
+
+        :param dict data: A dict using RestAPI structure
+        :param function key_extractors: A key extractor function.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises DeserializationError: if something went wrong
+        :rtype: Self
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        deserializer.key_extractors = (  # type: ignore
+            [  # type: ignore
+                attribute_key_case_insensitive_extractor,
+                rest_key_case_insensitive_extractor,
+                last_rest_key_case_insensitive_extractor,
+            ]
+            if key_extractors is None
+            else key_extractors
+        )
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def _flatten_subtype(cls, key, objects):
+        if "_subtype_map" not in cls.__dict__:
+            return {}
+        result = dict(cls._subtype_map[key])
+        for valuetype in cls._subtype_map[key].values():
+            result.update(objects[valuetype]._flatten_subtype(key, objects))  # pylint: disable=protected-access
+        return result
+
+    @classmethod
+    def _classify(cls, response, objects):
+        """Check the class _subtype_map for any child classes.
+        We want to ignore any inherited _subtype_maps.
+
+        :param dict response: The initial data
+        :param dict objects: The class objects
+        :returns: The class to be used
+        :rtype: class
+        """
+        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
+            subtype_value = None
+
+            if not isinstance(response, ET.Element):
+                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
+                subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
+            else:
+                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
+            if subtype_value:
+                # Try to match base class. Can be class name only
+                # (bug to fix in Autorest to support x-ms-discriminator-name)
+                if cls.__name__ == subtype_value:
+                    return cls
+                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
+                try:
+                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
+                except KeyError:
+                    _LOGGER.warning(
+                        "Subtype value %s has no mapping, use base class %s.",
+                        subtype_value,
+                        cls.__name__,
+                    )
+                    break
+            else:
+                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
+                break
+        return cls
+
+    @classmethod
+    def _get_rest_key_parts(cls, attr_key):
+        """Get the RestAPI key of this attr, split it and decode each part.
+
+        :param str attr_key: Attribute key must be in attribute_map.
+        :returns: A list of RestAPI parts
+        :rtype: list
+        """
+        rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+        return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+    """This decodes a key in an _attribute_map to the actual key we want to look at
+    inside the received data.
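+
+    For example, an escaped attribute-map key such as ``"odata\\.type"``
+    (an illustrative value) decodes to the wire key ``"odata.type"``.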
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
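+
+        A minimal sketch of direct usage, assuming ``MyModel`` is a hypothetical
+        generated model class (``Model.serialize`` above is the public wrapper
+        around this method):
+
+        .. code:: python
+
+            serializer = Serializer({"MyModel": MyModel})
+            body = serializer._serialize(my_model_instance)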
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
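+                            # (The element built for the child keeps its "{namespace}"
+                            # prefix; only the local part of the tag is rewritten below.)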
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+        :rtype: str, int, float, bool, dict, list
+        """
+        if data is None:
+            raise ValueError("No value for given attribute")
+
+        try:
+            if data is CoreNull:
+                return None
+            if data_type in self.basic_types.values():
+                return self.serialize_basic(data, data_type, **kwargs)
+
+            if data_type in self.serialize_type:
+                return self.serialize_type[data_type](data, **kwargs)
+
+            # If dependencies is empty, try with current data class
+            # It has to be a subclass of Enum anyway
+            enum_type = self.dependencies.get(data_type, data.__class__)
+            if issubclass(enum_type, Enum):
+                return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.serialize_type:
+                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+        except (ValueError, TypeError) as err:
+            msg = "Unable to serialize value: {!r} as type: {!r}."
+            raise SerializationError(msg.format(data, data_type)) from err
+        return self._serialize(data, **kwargs)
+
+    @classmethod
+    def _get_custom_serializers(cls, data_type, **kwargs):  # pylint: disable=inconsistent-return-statements
+        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+        if custom_serializer:
+            return custom_serializer
+        if kwargs.get("is_xml", False):
+            return cls._xml_basic_types_serializers.get(data_type)
+
+    @classmethod
+    def serialize_basic(cls, data, data_type, **kwargs):
+        """Serialize basic builtin data type.
+        Serializes objects to str, int, float or bool.
+
+        Possible kwargs:
+        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+        - is_xml bool : If set, use xml_basic_types_serializers
+
+        :param obj data: Object to be serialized.
+        :param str data_type: Type of object in the iterable.
+        :rtype: str, int, float, bool
+        :return: serialized object
+        """
+        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+        if custom_serializer:
+            return custom_serializer(data)
+        if data_type == "str":
+            return cls.serialize_unicode(data)
+        return eval(data_type)(data)  # nosec # pylint: disable=eval-used
+
+    @classmethod
+    def serialize_unicode(cls, data):
+        """Special handling for serializing unicode strings in Py2.
+        Encode to UTF-8 if unicode, otherwise handle as a str.
+
+        :param str data: Object to be serialized.
+        :rtype: str
+        :return: serialized object
+        """
+        try:  # If I received an enum, return its value
+            return data.value
+        except AttributeError:
+            pass
+
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                # Don't change it, JSON and XML ElementTree are totally able
+                # to serialize correctly u'' strings
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    def serialize_iter(self, data, iter_type, div=None, **kwargs):
+        """Serialize iterable.
+
+        Supported kwargs:
+        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+          serialization_ctxt['type'] should be same as data_type.
+        - is_xml bool : If set, serialize as XML
+
+        :param list data: Object to be serialized.
+        :param str iter_type: Type of object in the iterable.
+        :param str div: If set, this str will be used to combine the elements
+         in the iterable into a combined string. Default is None.
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+        :rtype: str
+        :return: serialized time
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_time(attr)
+        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+        if attr.microsecond:
+            t += ".{:06}".format(attr.microsecond)  # zero-pad so e.g. 500us -> ".000500"
+        return t
+
+    @staticmethod
+    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize TimeDelta object into ISO-8601 formatted string.
+
+        :param TimeDelta attr: Object to be serialized.
+        :rtype: str
+        :return: serialized duration
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises TypeError: if format invalid.
+        :return: serialized rfc
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises SerializationError: if format invalid.
+        :return: serialized iso
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise SerializationError(msg) from err
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise TypeError(msg) from err
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises SerializationError: if format invalid.
+        :return: serialized unix
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
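+        # Consume one level of the flattened "a.b.c" key per iteration.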
+        # Need the cast, as for some reason "split" is typed as list[str | Any]
+        dict_keys = cast(List[str], _FLATTEN.split(key))
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties underneath are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
+    attr, attr_desc, data
+):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties underneath are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case insensitive version of "last_rest_key_extractor".
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract the correct XML name with namespace.
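+
+    For instance, a hypothetical model whose ``_xml_map`` is
+    ``{"name": "Pet", "ns": "http://example.org"}`` resolves to
+    ``"{http://example.org}Pet"``.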
+
+    :param dict internal_type: A model type
+    :rtype: tuple
+    :returns: A tuple XML name + namespace dict
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped but no node: we want None
+            return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        # Iter and wrapped, should have found one node only (the wrap one)
+        if len(children) != 1:
+            raise DeserializationError(
+                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
+                    xml_name
+                )
+            )
+        return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an itertype, we should have found one element only or empty
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' where it was not expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer:
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
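+
+    A minimal usage sketch, assuming ``MyModel`` is a hypothetical generated
+    model class:
+
+    .. code:: python
+
+        deserializer = Deserializer({"MyModel": MyModel})
+        model = deserializer("MyModel", '{"name": "example"}', content_type="application/json")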
+    """
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
+    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
+        self.deserialize_type = {
+            "iso-8601": Deserializer.deserialize_iso,
+            "rfc-1123": Deserializer.deserialize_rfc,
+            "unix-time": Deserializer.deserialize_unix,
+            "duration": Deserializer.deserialize_duration,
+            "date": Deserializer.deserialize_date,
+            "time": Deserializer.deserialize_time,
+            "decimal": Deserializer.deserialize_decimal,
+            "long": Deserializer.deserialize_long,
+            "bytearray": Deserializer.deserialize_bytearray,
+            "base64": Deserializer.deserialize_base64,
+            "object": self.deserialize_object,
+            "[]": self.deserialize_iter,
+            "{}": self.deserialize_dict,
+        }
+        self.deserialize_expected_types = {
+            "duration": (isodate.Duration, datetime.timedelta),
+            "iso-8601": (datetime.datetime),
+        }
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+        self.key_extractors = [rest_key_extractor, xml_key_extractor]
+        # Additional properties detection only works if "rest_key_extractor" is used
+        # to extract the keys. Making it work with any key extractor is too
+        # complicated, with no real scenario for now.
+        # So we add a flag to disable additional properties detection. This flag
+        # should be used if you expect the input NOT to follow JSON REST syntax.
+        # Otherwise, results are unexpected.
+        self.additional_properties_detection = True
+
+    def __call__(self, target_obj, response_data, content_type=None):
+        """Call the deserializer to process a REST response.
+
+        :param str target_obj: Target data type to deserialize to.
+        :param requests.Response response_data: REST response object.
+        :param str content_type: Swagger "produces" if available.
+        :raises DeserializationError: if deserialization fails.
+        :return: Deserialized object.
+        :rtype: object
+        """
+        data = self._unpack_content(response_data, content_type)
+        return self._deserialize(target_obj, data)
+
+    def _deserialize(self, target_obj, data):  # pylint: disable=inconsistent-return-statements
+        """Call the deserializer on a model.
+
+        Data needs to be already deserialized as JSON or XML ElementTree.
+
+        :param str target_obj: Target data type to deserialize to.
+        :param object data: Object to deserialize.
+        :raises DeserializationError: if deserialization fails.
+        :return: Deserialized object.
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
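+
+        Note: readonly attributes are filtered out of the constructor call and
+        set on the instance via ``setattr`` afterwards, as the code below shows.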
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+        :return: Deserialized duration
+        :rtype: TimeDelta
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Time must have only digits and :. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize Unix time (IntTime format) into Datetime object.
+        This is represented as seconds.
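+
+        For example, ``0`` deserializes to ``1970-01-01T00:00:00+00:00`` (the
+        Unix epoch, in UTC).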
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/utils.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/utils.py new file mode 100644 index 000000000000..35c9c836f85f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_utils/utils.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Generic, TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py new file mode 100644 index 000000000000..f5af3a4eb8a2 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py @@ -0,0 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. 
+ :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if _index_with_default(method_added_on) > _index_with_default(client_api_version): + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. " + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py new file mode 100644 index 000000000000..da6781129f9e --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import LoadTestAdministrationClient # type: ignore +from ._client import LoadTestRunClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClient", + "LoadTestRunClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py new file mode 100644 index 000000000000..7f136d8bb201 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py @@ -0,0 +1,180 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration +from ._operations._operations import _LoadTestAdministrationClientOperationsMixin, _LoadTestRunClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class LoadTestAdministrationClient(_LoadTestAdministrationClientOperationsMixin): + """LoadTestAdministrationClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
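+
+    Example (an illustrative construction sketch; assumes the azure-identity
+    package is installed, and the endpoint value is a placeholder):
+
+    >>> from azure.identity.aio import DefaultAzureCredential
+    >>> client = LoadTestAdministrationClient(
+    ...     endpoint="<your-loadtesting-endpoint>", credential=DefaultAzureCredential()
+    ... )
+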
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestAdministrationClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) + + +class LoadTestRunClient(_LoadTestRunClientOperationsMixin): + """LoadTestRunClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
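+
+    Example (an illustrative usage sketch; the endpoint and test run Id are placeholders,
+    credential is assumed to be an AsyncTokenCredential, and get_test_run is one of the
+    generated operations on this client):
+
+    >>> async with LoadTestRunClient(endpoint="<endpoint>", credential=credential) as client:
+    ...     test_run = await client.get_test_run("<test-run-id>")
+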
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestRunClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py new file mode 100644 index 000000000000..9a77a5e7a3e4 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class LoadTestAdministrationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long + """Configuration for LoadTestAdministrationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) + + +class LoadTestRunClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for LoadTestRunClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
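+
+    Example (an illustrative sketch; credential is assumed to be an AsyncTokenCredential,
+    and pinning api_version explicitly is the override the note above cautions about):
+
+    >>> config = LoadTestRunClientConfiguration(
+    ...     endpoint="<endpoint>", credential=credential, api_version="2024-12-01-preview"
+    ... )
+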
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py new file mode 100644 index 000000000000..ac1fcbb3a8a9 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py @@ -0,0 +1,3492 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +import datetime +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ....microsoft.loadtestservice import models as _microsoft_loadtestservice_models5 +from ..._operations._operations import ( + build_load_test_administration_begin_upload_test_file_request, + build_load_test_administration_create_or_update_app_components_request, + build_load_test_administration_create_or_update_server_metrics_config_request, + build_load_test_administration_create_or_update_test_profile_request, + build_load_test_administration_create_or_update_test_request, + build_load_test_administration_delete_test_file_request, + build_load_test_administration_delete_test_profile_request, + build_load_test_administration_delete_test_request, + build_load_test_administration_get_app_components_request, + build_load_test_administration_get_server_metrics_config_request, + build_load_test_administration_get_test_file_request, + build_load_test_administration_get_test_profile_request, + build_load_test_administration_get_test_request, + build_load_test_administration_list_test_files_request, + build_load_test_administration_list_test_profiles_request, + build_load_test_administration_list_tests_request, + build_load_test_run_begin_test_profile_run_request, + build_load_test_run_begin_test_run_request, + build_load_test_run_create_or_update_app_components_request, + 
build_load_test_run_create_or_update_server_metrics_config_request, + build_load_test_run_delete_test_profile_run_request, + build_load_test_run_delete_test_run_request, + build_load_test_run_get_app_components_request, + build_load_test_run_get_metric_definitions_request, + build_load_test_run_get_metric_namespaces_request, + build_load_test_run_get_server_metrics_config_request, + build_load_test_run_get_test_profile_run_request, + build_load_test_run_get_test_run_file_request, + build_load_test_run_get_test_run_request, + build_load_test_run_list_metric_dimension_values_request, + build_load_test_run_list_metrics_request, + build_load_test_run_list_test_profile_runs_request, + build_load_test_run_list_test_runs_request, + build_load_test_run_stop_test_profile_run_request, + build_load_test_run_stop_test_run_request, +) +from ..._utils.model_base import SdkJSONEncoder, _deserialize +from ..._utils.utils import ClientMixinABC +from ..._validation import api_version_validation +from .._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class _LoadTestAdministrationClientOperationsMixin( + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], LoadTestAdministrationClientConfiguration] +): + + @overload + async def create_or_update_test( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.Test, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: ~microsoft.loadtestservice.models.Test + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. 
The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_test( + self, test_id: str, body: Union[_microsoft_loadtestservice_models5.Test, JSON, IO[bytes]], **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Is one of the following types: Test, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.Test or JSON or IO[bytes] + :return: Test. 
The Test is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.Test] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_app_components( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.TestAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: ~microsoft.loadtestservice.models.TestAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. 
+ + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_app_components( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models5.TestAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Is one of the following types: TestAppComponents, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestAppComponents or JSON or IO[bytes] + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_app_components_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_server_metrics_config( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_server_metrics_config( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Is one of the following types: + TestServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration or JSON or + IO[bytes] + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_server_metrics_config_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_app_components( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Get associated app component (collection of azure resources) for the given test. + + Get associated app component (collection of azure resources) for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_app_components_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_server_metrics_config( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """List server metrics configuration for the given test. + + List server metrics configuration for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_server_metrics_config_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.Test: + """Get load test details by test Id. + + Get load test details by test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: Test. 
The Test is compatible with MutableMapping
+        :rtype: ~microsoft.loadtestservice.models.Test
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_microsoft_loadtestservice_models5.Test] = kwargs.pop("cls", None)
+
+        _request = build_load_test_administration_get_test_request(
+            test_id=test_id,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_microsoft_loadtestservice_models5.Test, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace_async
+    async def get_test_file(
+        self, test_id: str, file_name: str, **kwargs: Any
+    ) -> _microsoft_loadtestservice_models5.TestFileInfo:
+        """Get a specific file associated with a test by file name.
+
+        Get a specific file associated with a test by file name.
+
+        :param test_id: Unique test identifier for the load test, must contain only lower-case
+         alphabetic, numeric, underscore or hyphen characters. Required.
+        :type test_id: str
+        :param file_name: Name of the file. Required.
+        :type file_name: str
+        :return: TestFileInfo. 
The TestFileInfo is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_test_files( + self, test_id: str, **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TestFileInfo"]: + """Get all test files. + + Get all test files. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
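+
+        Example (an illustrative paging sketch; assumes an existing async client, the
+        test Id is a placeholder, and file_name follows the TestFileInfo model):
+
+        >>> async for file_info in client.list_test_files("<test-id>"):
+        ...     print(file_info.file_name)
+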
+        :type test_id: str
+        :return: An iterator-like instance of TestFileInfo
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TestFileInfo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[List[_microsoft_loadtestservice_models5.TestFileInfo]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_load_test_administration_list_test_files_request(
+                    test_id=test_id,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(
+                List[_microsoft_loadtestservice_models5.TestFileInfo], deserialized.get("value", [])
+            )
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace
+    def list_tests(
+        self,
+        *,
+        orderby: Optional[str] = None,
+        search: Optional[str] = None,
+        last_modified_start_time: Optional[datetime.datetime] = None,
+        last_modified_end_time: Optional[datetime.datetime] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.Test"]:
+        """Get all load tests by the fully qualified resource Id, e.g.
+        subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}.
+
+        Get all load tests by the fully qualified resource Id, e.g.
+        subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}.
+
+        :keyword orderby: Sort on the supported fields in (field asc/desc) format, e.g.
+         lastModifiedDateTime asc. Supported fields - lastModifiedDateTime. Default value is None.
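+
+        Example (an illustrative filtering sketch; assumes an existing async client, and
+        test_id follows the Test model):
+
+        >>> async for test in client.list_tests(orderby="lastModifiedDateTime desc", search="Login"):
+        ...     print(test.test_id)
+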
+        :paramtype orderby: str
+        :keyword search: Prefix-based, case-sensitive search on searchable fields - displayName,
+         createdBy. For example, to search for a test with the display name 'Login Test',
+         the search parameter can be 'Login'. Default value is None.
+        :paramtype search: str
+        :keyword last_modified_start_time: Start DateTime (RFC 3339 literal format) of the last updated
+         time range to filter tests. Default value is None.
+        :paramtype last_modified_start_time: ~datetime.datetime
+        :keyword last_modified_end_time: End DateTime (RFC 3339 literal format) of the last updated time
+         range to filter tests. Default value is None.
+        :paramtype last_modified_end_time: ~datetime.datetime
+        :return: An iterator-like instance of Test
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.Test]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        maxpagesize = kwargs.pop("maxpagesize", None)
+        cls: ClsType[List[_microsoft_loadtestservice_models5.Test]] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                _request = build_load_test_administration_list_tests_request(
+                    orderby=orderby,
+                    search=search,
+                    last_modified_start_time=last_modified_start_time,
+                    last_modified_end_time=last_modified_end_time,
+                    maxpagesize=maxpagesize,
+                    api_version=self._config.api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                path_format_arguments = {
+                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+                }
+                _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+            return _request
+
+        async def extract_data(pipeline_response):
+            deserialized = pipeline_response.http_response.json()
+            list_of_elem = _deserialize(List[_microsoft_loadtestservice_models5.Test], deserialized.get("value", []))
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response)
+
+            return pipeline_response
+
+        return AsyncItemPaged(get_next, extract_data)
+
+    @distributed_trace_async
+    
async def _begin_upload_test_file(
+        self,
+        test_id: str,
+        file_name: str,
+        body: bytes,
+        *,
+        file_type: Optional[Union[str, _microsoft_loadtestservice_models5.FileType]] = None,
+        **kwargs: Any
+    ) -> _microsoft_loadtestservice_models5.TestFileInfo:
+        """Upload an input file for a given test Id. File size can't be more than 50 MB.
+        An existing file with the same name for the given test will be overwritten. The
+        file should be provided in the request body as application/octet-stream.
+
+        Upload an input file for a given test Id. File size can't be more than 50 MB.
+        An existing file with the same name for the given test will be overwritten. The
+        file should be provided in the request body as application/octet-stream.
+
+        :param test_id: Unique name for the load test, must contain only lower-case alphabetic,
+         numeric, underscore or hyphen characters. Required.
+        :type test_id: str
+        :param file_name: Unique name for test file with file extension like: App.jmx. Required.
+        :type file_name: str
+        :param body: The file content as application/octet-stream. Required.
+        :type body: bytes
+        :keyword file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES",
+         "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". Default value
+         is None.
+        :paramtype file_type: str or ~microsoft.loadtestservice.models.FileType
+        :return: TestFileInfo. The TestFileInfo is compatible with MutableMapping
+        :rtype: ~microsoft.loadtestservice.models.TestFileInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: str = kwargs.pop("content_type", _headers.pop("content-type", "application/octet-stream"))
+        cls: ClsType[_microsoft_loadtestservice_models5.TestFileInfo] = kwargs.pop("cls", None)
+
+        _content = body
+
+        _request = build_load_test_administration_begin_upload_test_file_request(
+            test_id=test_id,
+            file_name=file_name,
+            file_type=file_type,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_microsoft_loadtestservice_models5.TestFileInfo, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace_async
+    async def delete_test_file(self, test_id: str, file_name: str, **kwargs: Any) -> None:
+        """Delete file by the 
file name for a test. + + Delete file by the file name for a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def delete_test(self, test_id: str, **kwargs: Any) -> None: + """Delete a test by its test Id. + + Delete a test by its test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: _microsoft_loadtestservice_models5.TestProfile, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: ~microsoft.loadtestservice.models.TestProfile + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test_profile( + self, test_profile_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "content_type", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: Union[_microsoft_loadtestservice_models5.TestProfile, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Is one of the following types: TestProfile, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.TestProfile or JSON or IO[bytes] + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestProfile] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_profile_request( + test_profile_id=test_profile_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def delete_test_profile(self, test_profile_id: str, **kwargs: Any) -> None: + """Delete a test profile. + + Delete a test profile by its test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_profile_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def get_test_profile( + self, test_profile_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Get load test profile details. + + Get load test profile details by test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfile] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "last_modified_start_time", + "last_modified_end_time", + "test_profile_ids", + "test_ids", + "accept", + ] + }, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def list_test_profiles( + self, + *, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TestProfile"]: + """List test profiles. + + Get all test profiles for the given filters. + + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter test profiles. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter test profiles. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :keyword test_profile_ids: Comma separated list of IDs of the test profiles to filter. Default + value is None. + :paramtype test_profile_ids: list[str] + :keyword test_ids: Comma separated list IDs of the tests which should be associated with the + test profiles to fetch. Default value is None. 
+ :paramtype test_ids: list[str] + :return: An iterator like instance of TestProfile + :rtype: ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TestProfile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestProfile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_profiles_request( + maxpagesize=maxpagesize, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + test_profile_ids=test_profile_ids, + test_ids=test_ids, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TestProfile], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class _LoadTestRunClientOperationsMixin( + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], LoadTestRunClientConfiguration] +): + + @overload + async def _begin_test_run( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRun, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... 
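+ # The @overload stubs in this mixin are erased at runtime (typing.overload keeps only
+ # the final @distributed_trace_async implementation), so each operation accepts the
+ # typed model, a JSON mapping, or a binary stream through a single code path. A
+ # hedged sketch of the three equivalent call shapes, assuming the public async client
+ # surfaces this operation as begin_test_run:
+ #
+ # from microsoft.loadtestservice import models
+ #
+ # run = models.TestRun(test_id="my-test", display_name="nightly-run")
+ # await client.begin_test_run("run-001", run)                     # typed model
+ # await client.begin_test_run("run-002", {"testId": "my-test"})   # raw JSON mapping
+ # with open("test_run.json", "rb") as payload:
+ #     await client.begin_test_run("run-003", payload)             # IO[bytes] body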
+ @overload + async def _begin_test_run( + self, + test_run_id: str, + body: JSON, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... + @overload + async def _begin_test_run( + self, + test_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... + + @distributed_trace_async + async def _begin_test_run( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRun, JSON, IO[bytes]], + *, + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: + """Create and start a new test run with the given test run Id. + + Create and start a new test run with the given test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: The resource instance. Is one of the following types: TestRun, JSON, IO[bytes] + Required. + :type body: ~microsoft.loadtestservice.models.TestRun or JSON or IO[bytes] + :keyword old_test_run_id: Existing test run identifier that should be rerun, if this is + provided, the + test will run with the JMX file, configuration and app components from the + existing test run. You can override the configuration values for new test run + in the request body. Default value is None. + :paramtype old_test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_run_request( + test_run_id=test_run_id, + old_test_run_id=old_test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
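+ # map_error (from azure.core.exceptions) raises the specific exception type registered
+ # for this status code in error_map (e.g. ResourceNotFoundError for 404) and returns
+ # normally for unmapped codes, so the generic HttpResponseError below is the fallback.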
raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_app_components( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRunAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: ~microsoft.loadtestservice.models.TestRunAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. 
The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_app_components( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRunAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Is one of the following types: TestRunAppComponents, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestRunAppComponents or JSON or IO[bytes] + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRunAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_app_components_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> 
_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :param body: Server metric configuration model. Is one of the following types: + TestRunServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration or JSON or + IO[bytes] + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_server_metrics_config_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_test_run(self, test_run_id: str, **kwargs: Any) -> None: + """Delete an existing load test run. + + Delete an existing load test run by providing the testRunId. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def get_app_components( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Get associated app component (collection of azure resources) for the given test + run. + + Get associated app component (collection of azure resources) for the given test + run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunAppComponents. 
The TestRunAppComponents is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_app_components_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_server_metrics_config( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Get associated server metrics configuration for the given test run. + + Get associated server metrics configuration for the given test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunServerMetricsConfiguration. 
The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_server_metrics_config_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.TestRun: + """Get test run details by test run Id. + + Get test run details by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. 
The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test_run_file( + self, test_run_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunFileInfo: + """Get test run file by file name. + + Get test run file by file name. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestRunFileInfo. 
The TestRunFileInfo is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRunFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_file_request( + test_run_id=test_run_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_metric_dimension_values( + self, + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + **kwargs: Any + ) -> AsyncItemPaged[str]: + """List the dimension values for the given metric dimension name. + + List the dimension values for the given metric dimension name. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param name: Dimension name. Required. + :type name: str + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :return: An iterator like instance of str + :rtype: ~azure.core.async_paging.AsyncItemPaged[str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[str]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metric_dimension_values_request( + test_run_id=test_run_id, + name=name, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + interval=interval, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[str], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_metric_definitions( + self, test_run_id: str, *, metric_namespace: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.MetricDefinitionCollection: + """List the metric definitions for a load test run. + + List the metric definitions for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :return: MetricDefinitionCollection. 
The MetricDefinitionCollection is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.MetricDefinitionCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.MetricDefinitionCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_definitions_request( + test_run_id=test_run_id, + metric_namespace=metric_namespace, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.MetricDefinitionCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_metric_namespaces( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.MetricNamespaceCollection: + """List the metric namespaces for a load test run. + + List the metric namespaces for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: MetricNamespaceCollection. 
The MetricNamespaceCollection is compatible with + MutableMapping + :rtype: ~microsoft.loadtestservice.models.MetricNamespaceCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.MetricNamespaceCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_namespaces_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.MetricNamespaceCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[_microsoft_loadtestservice_models5.MetricRequestPayload] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: ~microsoft.loadtestservice.models.MetricRequestPayload + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[JSON] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: JSON + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[IO[bytes]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". 
Default value is None. + :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_metrics( + self, + test_run_id: str, + body: Optional[Union[_microsoft_loadtestservice_models5.MetricRequestPayload, JSON, IO[bytes]]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Is one of the following types: MetricRequestPayload, + JSON, IO[bytes] Default value is None. + :type body: ~microsoft.loadtestservice.models.MetricRequestPayload or JSON or IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~microsoft.loadtestservice.models.TimeGrain + :return: An iterator like instance of TimeSeriesElement + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[List[_microsoft_loadtestservice_models5.TimeSeriesElement]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metrics_request( + test_run_id=test_run_id, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + aggregation=aggregation, + interval=interval, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TimeSeriesElement], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_test_runs( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] 
= None, + execution_to: Optional[datetime.datetime] = None, + status: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TestRun"]: + """Get all test runs for the given filters. + + Get all test runs for the given filters. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format. eg: executedDateTime + asc. Supported fields - executedDateTime. Default value is None. + :paramtype orderby: str + :keyword search: Prefix based, case sensitive search on searchable fields - description, + executedUser. For example, to search for a test run, with description 500 VUs, + the search parameter can be 500. Default value is None. + :paramtype search: str + :keyword test_id: Unique name of an existing load test. Default value is None. + :paramtype test_id: str + :keyword execution_from: Start DateTime(RFC 3339 literal format) of test-run execution time + filter range. Default value is None. + :paramtype execution_from: ~datetime.datetime + :keyword execution_to: End DateTime(RFC 3339 literal format) of test-run execution time filter + range. Default value is None. + :paramtype execution_to: ~datetime.datetime + :keyword status: Comma separated list of test run status. Default value is None. + :paramtype status: str + :return: An iterator like instance of TestRun + :rtype: ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TestRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_runs_request( + orderby=orderby, + search=search, + test_id=test_id, + execution_from=execution_from, + execution_to=execution_to, + status=status, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models5.TestRun], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, 
AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def stop_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.TestRun: + """Stop test run by test run Id. + + Stop test run by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: _microsoft_loadtestservice_models5.TestProfileRun, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... + @overload + async def _begin_test_profile_run( + self, test_profile_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... + @overload + async def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... 
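For reference, list_metrics and list_test_runs above return azure.core.async_paging.AsyncItemPaged rather than a list: prepare_request builds the first or next-link request, extract_data pulls the page items and nextLink, and further pages are fetched lazily as iteration proceeds. A minimal consumption sketch, assuming the released package layout (azure.developer.loadtesting.aio) exposes this client; the endpoint, test run id, and metric name/namespace below are placeholders:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.developer.loadtesting.aio import LoadTestRunClient  # assumed public aio export


    async def consume_metrics() -> None:
        # Endpoint, test run id, and metric name/namespace are illustrative placeholders.
        async with DefaultAzureCredential() as credential:
            async with LoadTestRunClient(endpoint="<data-plane-endpoint>", credential=credential) as client:
                pager = client.list_metrics(  # returns AsyncItemPaged; no request is sent until iteration starts
                    test_run_id="sample-test-run",
                    metric_name="VirtualUsers",
                    metric_namespace="LoadTestRunMetrics",
                    time_interval="2024-12-01T00:00:00Z/2024-12-01T01:00:00Z",  # 'startDateTime_ISO/endDateTime_ISO'
                )
                async for element in pager:  # each element is a deserialized TimeSeriesElement
                    print(element)


    asyncio.run(consume_metrics())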
+ + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "content_type", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestProfileRun, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Create and start a new test profile run. + + Create and start a new test profile run with the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :param body: The resource instance. Is one of the following types: TestProfileRun, JSON, + IO[bytes] Required. + :type body: ~microsoft.loadtestservice.models.TestProfileRun or JSON or IO[bytes] + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", 
"2024-07-01-preview", "2024-12-01-preview"], + ) + async def delete_test_profile_run(self, test_profile_run_id: str, **kwargs: Any) -> None: + """Delete an existing load test profile run. + + Delete an existing load test profile run by providing the test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def get_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Get test profile run details. + + Get test profile run details by test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "min_start_date_time", + "max_start_date_time", + "min_end_date_time", + "max_end_date_time", + "created_date_start_time", + "created_date_end_time", + "test_profile_run_ids", + "test_profile_ids", + "statuses", + "accept", + ] + }, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + def list_test_profile_runs( + self, + *, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncItemPaged["_microsoft_loadtestservice_models5.TestProfileRun"]: + """List test profile runs. + + Get all test profile runs for the given filters. + + :keyword min_start_date_time: Minimum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype min_start_date_time: ~datetime.datetime + :keyword max_start_date_time: Maximum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype max_start_date_time: ~datetime.datetime + :keyword min_end_date_time: Minimum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. 
Default value is None. + :paramtype min_end_date_time: ~datetime.datetime + :keyword max_end_date_time: Maximum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype max_end_date_time: ~datetime.datetime + :keyword created_date_start_time: Start DateTime(RFC 3339 literal format) of the created time + range to filter test profile runs. Default value is None. + :paramtype created_date_start_time: ~datetime.datetime + :keyword created_date_end_time: End DateTime(RFC 3339 literal format) of the created time range + to filter test profile runs. Default value is None. + :paramtype created_date_end_time: ~datetime.datetime + :keyword test_profile_run_ids: Comma separated list of IDs of the test profile runs to filter. + Default value is None. + :paramtype test_profile_run_ids: list[str] + :keyword test_profile_ids: Comma separated IDs of the test profiles which should be associated + with the test profile runs to fetch. Default value is None. + :paramtype test_profile_ids: list[str] + :keyword statuses: Comma separated list of Statuses of the test profile runs to filter. Default + value is None. + :paramtype statuses: list[str] + :return: An iterator like instance of TestProfileRun + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~microsoft.loadtestservice.models.TestProfileRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestProfileRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_profile_runs_request( + maxpagesize=maxpagesize, + min_start_date_time=min_start_date_time, + max_start_date_time=max_start_date_time, + min_end_date_time=min_end_date_time, + max_end_date_time=max_end_date_time, + created_date_start_time=created_date_start_time, + created_date_end_time=created_date_end_time, + test_profile_run_ids=test_profile_run_ids, + test_profile_ids=test_profile_ids, + statuses=statuses, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + 
List[_microsoft_loadtestservice_models5.TestProfileRun], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + api_versions_list=["2024-05-01-preview", "2024-07-01-preview", "2024-12-01-preview"], + ) + async def stop_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Stop test profile run. + + Stop test profile run for the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~microsoft.loadtestservice.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py new file mode 100644 index 
000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed b/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py new file mode 100644 index 000000000000..d29d34b30efd --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py @@ -0,0 +1,86 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
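The _patch.py modules above are the supported hand-customization hook: the generated __init__ re-exports whatever a hand-written module adds to __all__ and then calls patch_sdk(). A minimal sketch of that pattern, where the helper name is purely illustrative and not part of this PR:

    # coding=utf-8
    """Illustrative _patch.py sketch; only the pattern is real, the helper is hypothetical."""
    from typing import List


    def default_client_kwargs() -> dict:
        # Hypothetical hand-written helper, shown only to illustrate the customization flow.
        return {"logging_enable": True}


    __all__: List[str] = ["default_client_kwargs"]  # objects listed here become part of the public surface


    def patch_sdk():
        """Required hook; the generated __init__ invokes it at import time."""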
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_test( + test_id="12345678-1234-1234-1234-123456789012", + body={ + "autoStopCriteria": {"autoStopDisabled": True, "errorRate": 70, "errorRateTimeWindowInSeconds": 60}, + "description": "sample description", + "displayName": "Performance_LoadTest", + "engineBuiltInIdentityIds": [ + "/subscriptions/10000000-0000-0000-0000-000000000000/resourceGroups/samplerg1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sampleresourcename" + ], + "engineBuiltInIdentityType": "UserAssigned", + "environmentVariables": {"envvar1": "sampletext"}, + "keyvaultReferenceIdentityId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/sampleprovider/sampleresourcetype/sampleresourcename", + "keyvaultReferenceIdentityType": "UserAssigned", + "loadTestConfiguration": {"engineInstances": 6, "splitAllCSVs": True}, + "metricsReferenceIdentityId": "/subscriptions/10000000-0000-0000-0000-000000000000/resourceGroups/samplerg1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sampleresourcename", + "metricsReferenceIdentityType": "UserAssigned", + "passFailCriteria": { + "passFailMetrics": { + "fefd759d-7fe8-4f83-8b6d-aeebe0f491fe": { + "action": "continue", + "aggregate": "percentage", + "clientMetric": "response_time_ms", + "condition": ">", + "value": 20, + } + }, + "passFailServerMetrics": { + "fefd759d-7fe8-4f83-8b6d-aeebe0f491fe": { + "action": "continue", + "aggregation": "Average", + "condition": ">", + "metricName": "Percentage CPU", + "metricNamespace": "Microsoft.Compute/virtualMachines", + "resourceId": "/subscriptions/12345678-1234-1234-1234-123456789abc/resourceGroups/MyResourceGroup/providers/Microsoft.Compute/virtualMachines/MyVM", + "value": 20, + } + }, + }, + "secrets": { + "secret1": { + "type": "AKV_SECRET_URI", + "value": "https://samplevault.vault.azure.net/secrets/samplesecret/f113f91fd4c44a368049849c164db827", + } + }, + "subnetId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/Microsoft.Network/virtualNetworks/samplenetworkresource/subnets/AAAAA0A0A0", + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTest.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py new file mode 100644 index 000000000000..2c4a39aa02f6 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py @@ -0,0 +1,52 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +#
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_app_components.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_app_components( + test_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + body={ + "components": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource": { + "displayName": "Performance_LoadTest_Insights", + "kind": "web", + "resourceName": "appcomponentresource", + "resourceType": "microsoft.insights/components", + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py new file mode 100644 index 000000000000..2206f091558d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_profile.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET.
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_test_profile( + test_profile_id="12345678-1234-1234-1234-123456789012", + body={ + "description": "sample description", + "displayName": "Performance_TestProfile", + "targetResourceConfigurations": { + "configurations": { + "config1": {"httpConcurrency": 16, "instanceMemoryMB": 2048}, + "config2": {"httpConcurrency": 16, "instanceMemoryMB": 4096}, + }, + "kind": "FunctionsFlexConsumption", + }, + "targetResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/Microsoft.Web/sites/myfunctionapp", + "testId": "12346-abcd-6789", + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestProfile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py new file mode 100644 index 000000000000..281ddf97e9c4 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py @@ -0,0 +1,52 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_run_app_components.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET.
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_app_components( + test_run_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + body={ + "components": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource": { + "displayName": "Performance_LoadTest_Insights", + "kind": "web", + "resourceName": "appcomponentresource", + "resourceType": "microsoft.insights/components", + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestRunAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py new file mode 100644 index 000000000000..63e643ee0cec --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_run_server_metrics_config.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET.
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_server_metrics_config( + test_run_id="edc6e529-d009-4b99-b763-ca492e3a2823", + body={ + "metrics": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource/providers/microsoft.insights/metricdefinitions/requests/duration": { + "aggregation": "Average", + "displayDescription": "sample description", + "metricNamespace": "microsoft.insights/components", + "name": "requests/duration", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource", + "resourceType": "microsoft.insights/components", + "unit": None, + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestRunServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py new file mode 100644 index 000000000000..558421e277fa --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_server_metrics_config.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET.
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_server_metrics_config( + test_id="edc6e529-d009-4b99-b763-ca492e3a2823", + body={ + "metrics": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource/providers/microsoft.insights/metricdefinitions/requests/duration": { + "aggregation": "Average", + "displayDescription": "sample description", + "metricNamespace": "microsoft.insights/components", + "name": "requests/duration", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource", + "resourceType": "microsoft.insights/components", + "unit": None, + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py new file mode 100644 index 000000000000..895ea887bb37 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test( + test_id="12345678-1234-1234-1234-123456789012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTest.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py new file mode 100644 index 000000000000..24eead669e3d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_file.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_file( + test_id="12345678-1234-1234-1234-123456789012", + file_name="app.jmx", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestFile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py new file mode 100644 index 000000000000..e9ebe66beb17 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_profile.py + + Before running the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_profile( + test_profile_id="12345678-1234-1234-1234-123456789012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestProfile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py new file mode 100644 index 000000000000..f033049beda1 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python delete_test_profile_run.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # This is a test-run operation, so it must use the LoadTestRunClient that
+    # is imported above (the generated sample mistakenly constructed
+    # LoadTestAdministrationClient, which is never imported in this module).
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    client.delete_test_profile_run(
+        test_profile_run_id="12316678-1234-1234-1234-122451189012",
+    )
+
+
+# x-ms-original-file: 2024-12-01-preview/DeleteTestProfileRun.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py
new file mode 100644
index 000000000000..27a552886088
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python delete_test_run.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    client.delete_test_run(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+    )
+
+
+# x-ms-original-file: 2024-12-01-preview/DeleteTestRun.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py
new file mode 100644
index 000000000000..f4fa97b9b85b
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test(
+        test_id="12345678-1234-1234-1234-123456789012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTest.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py
new file mode 100644
index 000000000000..bf52cbe67585
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_file.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_file(
+        test_id="12345678-1234-1234-1234-123456789012",
+        file_name="sample.jmx",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestFile.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py
new file mode 100644
index 000000000000..8ea884a93b2b
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_profile.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_profile(
+        test_profile_id="12345678-1234-1234-1234-123456789012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestProfile.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py
new file mode 100644
index 000000000000..10d9432726d0
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_profile_run_executed.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_profile_run(
+        test_profile_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestProfileRun_Executed.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py
new file mode 100644
index 000000000000..31a09483e8e7
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_profile_run_executing.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_profile_run(
+        test_profile_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestProfileRun_Executing.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py
new file mode 100644
index 000000000000..a9dee2f405f0
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_run.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_run(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestRun.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py
new file mode 100644
index 000000000000..411fe77038c3
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python get_test_run_file.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_test_run_file(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+        file_name="sample.jmx",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/GetTestRunFile.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py
new file mode 100644
index 000000000000..0893654b69b7
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_metric_dimension_values.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_metric_dimension_values(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+        name="SamplerName",
+        metric_name="ActiveThreads",
+        metric_namespace="LoadTestRunMetrics",
+        time_interval="2022-09-24T19:00:40Z/2022-09-25T19:28:40Z",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListMetricDimensionValues.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py
new file mode 100644
index 000000000000..9b49106f0bac
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_app_components.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_app_components(
+        test_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestAppComponents.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py
new file mode 100644
index 000000000000..a8e43d484485
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
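+#
+# NOTE (editorial, not generator output): the list_* operations in these
+# samples return paged iterables; iterating the response fetches pages
+# lazily, which is why main() below loops over items instead of printing the
+# response object itself. A hedged usage sketch, reusing the client that
+# main() constructs (the "fileName" key is assumed from the FileInfo shapes
+# used elsewhere in this change, not verified against a live service):
+#
+#     for file_info in client.list_test_files(test_id="12345678-1234-1234-1234-123456789012"):
+#         print(file_info["fileName"])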
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_files.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_test_files(
+        test_id="12345678-1234-1234-1234-123456789012",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestFiles.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py
new file mode 100644
index 000000000000..ace1b5bd3879
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_profile_runs.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_test_profile_runs()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestProfileRuns.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py
new file mode 100644
index 000000000000..9049778a085d
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_profiles.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_test_profiles()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestProfiles.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py
new file mode 100644
index 000000000000..4d7e4b6ec69f
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_run_app_components.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_app_components(
+        test_run_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRunAppComponents.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py
new file mode 100644
index 000000000000..6e68909f3c2c
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
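+#
+# NOTE (editorial): the time_interval argument used below is an ISO 8601
+# "start/end" span, e.g. "2022-09-24T19:00:40Z/2022-09-25T19:28:40Z", with
+# both timestamps in UTC. This reading is inferred from the sample value
+# itself rather than from additional service documentation.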
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_run_metrics.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_metrics(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+        metric_name="ActiveThreads",
+        metric_namespace="LoadTestRunMetrics",
+        time_interval="2022-09-24T19:00:40Z/2022-09-25T19:28:40Z",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRunMetrics.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py
new file mode 100644
index 000000000000..95cb2a72ef07
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_run_metrics_definitions.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_metric_definitions(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+        metric_namespace="LoadTestRunMetrics",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRunMetricsDefinitions.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py
new file mode 100644
index 000000000000..0c5ca16bf254
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_run_metrics_namespaces.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_metric_namespaces(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRunMetricsNamespaces.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py
new file mode 100644
index 000000000000..defb11ea513b
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_run_server_metrics_config.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_server_metrics_config(
+        test_run_id="12345678-1234-1234-1234-123456789012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRunServerMetricsConfig.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py
new file mode 100644
index 000000000000..24b5699bb152
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_runs.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_test_runs()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestRuns.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py
new file mode 100644
index 000000000000..13b5f3afff32
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_test_server_metrics_config.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.get_server_metrics_config(
+        test_id="12345678-1234-1234-1234-123456789012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTestServerMetricsConfig.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py
new file mode 100644
index 000000000000..810fdaa0b97f
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestAdministrationClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python list_tests.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = LoadTestAdministrationClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.list_tests()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: 2024-12-01-preview/ListTests.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py
new file mode 100644
index 000000000000..a5714bd663f1
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python stop_test_profile_run.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.stop_test_profile_run(
+        test_profile_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/StopTestProfileRun.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py
new file mode 100644
index 000000000000..8f370f09a7c9
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from customizations import LoadTestRunClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-developer-loadtesting
+# USAGE
+    python stop_test_run.py
+
+    Before running the sample, set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more information about how to obtain these values, see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    # Fixed: use the imported LoadTestRunClient, not LoadTestAdministrationClient.
+    client = LoadTestRunClient(
+        endpoint="ENDPOINT",
+        credential=DefaultAzureCredential(),
+    )
+
+    response = client.stop_test_run(
+        test_run_id="12316678-1234-1234-1234-122451189012",
+    )
+    print(response)
+
+
+# x-ms-original-file: 2024-12-01-preview/StopTestRun.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py
new file mode 100644
index 000000000000..4350841161b5
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
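+#
+# NOTE (editorial): this conftest registers test-proxy sanitizers so that
+# real subscription, tenant, and client identifiers never land in recordings.
+# A minimal sketch of the pattern the fixture below repeats (the names here
+# are placeholders, not part of the generated code):
+#
+#     real_value = os.environ.get("SOME_ID", "00000000-0000-0000-0000-000000000000")
+#     add_general_regex_sanitizer(regex=real_value, value="00000000-0000-0000-0000-000000000000")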
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    loadtestadministration_subscription_id = os.environ.get(
+        "LOADTESTADMINISTRATION_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    loadtestadministration_tenant_id = os.environ.get(
+        "LOADTESTADMINISTRATION_TENANT_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    loadtestadministration_client_id = os.environ.get(
+        "LOADTESTADMINISTRATION_CLIENT_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    loadtestadministration_client_secret = os.environ.get(
+        "LOADTESTADMINISTRATION_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(
+        regex=loadtestadministration_subscription_id, value="00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=loadtestadministration_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loadtestadministration_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(
+        regex=loadtestadministration_client_secret, value="00000000-0000-0000-0000-000000000000"
+    )
+
+    loadtestrun_subscription_id = os.environ.get("LOADTESTRUN_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000")
+    loadtestrun_tenant_id = os.environ.get("LOADTESTRUN_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    loadtestrun_client_id = os.environ.get("LOADTESTRUN_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    loadtestrun_client_secret = os.environ.get("LOADTESTRUN_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loadtestrun_subscription_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loadtestrun_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loadtestrun_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loadtestrun_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py
new file mode 100644
index 000000000000..db860d9d5d8c
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py
@@ -0,0 +1,341 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import LoadTestAdministrationClientTestBase, LoadTestAdministrationPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestAdministration(LoadTestAdministrationClientTestBase): + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_test( + test_id="str", + body={ + "testId": "str", + "autoStopCriteria": {"autoStopDisabled": bool, "errorRate": 0.0, "errorRateTimeWindowInSeconds": 0}, + "baselineTestRunId": "str", + "certificate": {"name": "str", "type": "str", "value": "str"}, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "engineBuiltInIdentityIds": ["str"], + "engineBuiltInIdentityType": "str", + "environmentVariables": {"str": "str"}, + "inputArtifacts": { + "additionalFileInfo": [ + { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + } + ], + "configFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "inputArtifactsZipFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "testScriptFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "urlTestConfigFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "userPropFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + }, + "keyvaultReferenceIdentityId": "str", + "keyvaultReferenceIdentityType": "str", + "kind": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "loadTestConfiguration": { + "engineInstances": 0, + "optionalLoadTestConfig": { + "duration": 0, + "endpointUrl": "str", + "maxResponseTimeInMs": 0, + "rampUpTime": 0, + "requestsPerSecond": 0, + "virtualUsers": 0, + }, + "quickStartTest": bool, + "regionalLoadTestConfig": [{"engineInstances": 0, "region": "str"}], + "splitAllCSVs": bool, + }, + "metricsReferenceIdentityId": "str", + "metricsReferenceIdentityType": "str", + "passFailCriteria": { + "passFailMetrics": { + "str": { + "action": "str", + "actualValue": 0.0, + "aggregate": "str", + "clientMetric": "str", + "condition": "str", + "requestName": "str", + "result": "str", + "value": 0.0, + } + }, + "passFailServerMetrics": { + "str": { + "aggregation": "str", + "condition": "str", + "metricName": "str", + "metricNamespace": "str", + "resourceId": "str", + "value": 0.0, + "action": "str", + "actualValue": 0.0, + "result": "str", + } + }, + }, + "publicIPDisabled": bool, + "secrets": {"str": {"type": "str", "value": "str"}}, + "subnetId": "str", + }, + ) 
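+
+        # A hedged example of the check logic requested below; the keys are
+        # taken from the request body above, and a live service may add or
+        # normalize fields, so treat these as illustrative only:
+        # assert response["testId"] == "str"
+        # assert "loadTestConfiguration" in response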
+ + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_app_components(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_app_components( + test_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_server_metrics_config( + test_id="str", + body={ + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_app_components(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_app_components( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_server_metrics_config( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test_file(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_test_files(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_files( + test_id="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_tests(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_tests() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
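+        # For example (hedged; the "testId" key is inferred from the request
+        # bodies above, not verified against a live service):
+        # assert all("testId" in item for item in result)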
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test_file(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_test_profile( + test_profile_id="str", + body={ + "testProfileId": "str", + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "targetResourceConfigurations": "target_resource_configurations", + "targetResourceId": "str", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_test_profiles(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_profiles() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py new file mode 100644 index 000000000000..ee605480da90 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py @@ -0,0 +1,342 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import LoadTestAdministrationPreparer +from testpreparer_async import LoadTestAdministrationClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestAdministrationAsync(LoadTestAdministrationClientTestBaseAsync): + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_test( + test_id="str", + body={ + "testId": "str", + "autoStopCriteria": {"autoStopDisabled": bool, "errorRate": 0.0, "errorRateTimeWindowInSeconds": 0}, + "baselineTestRunId": "str", + "certificate": {"name": "str", "type": "str", "value": "str"}, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "engineBuiltInIdentityIds": ["str"], + "engineBuiltInIdentityType": "str", + "environmentVariables": {"str": "str"}, + "inputArtifacts": { + "additionalFileInfo": [ + { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + } + ], + "configFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "inputArtifactsZipFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "testScriptFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "urlTestConfigFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "userPropFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + }, + "keyvaultReferenceIdentityId": "str", + "keyvaultReferenceIdentityType": "str", + "kind": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "loadTestConfiguration": { + "engineInstances": 0, + "optionalLoadTestConfig": { + "duration": 0, + "endpointUrl": "str", + "maxResponseTimeInMs": 0, + "rampUpTime": 0, + "requestsPerSecond": 0, + "virtualUsers": 0, + }, + "quickStartTest": bool, + "regionalLoadTestConfig": [{"engineInstances": 0, "region": "str"}], + "splitAllCSVs": bool, + }, + "metricsReferenceIdentityId": "str", + "metricsReferenceIdentityType": "str", + "passFailCriteria": { + "passFailMetrics": { + "str": { + "action": "str", + "actualValue": 0.0, + "aggregate": "str", + "clientMetric": "str", + "condition": "str", + "requestName": "str", + "result": "str", + "value": 0.0, + } + }, + "passFailServerMetrics": { + "str": { + "aggregation": "str", + "condition": "str", + "metricName": "str", + "metricNamespace": "str", + "resourceId": "str", + "value": 0.0, + "action": "str", + "actualValue": 0.0, + "result": "str", + } + }, + }, + "publicIPDisabled": bool, + 
"secrets": {"str": {"type": "str", "value": "str"}}, + "subnetId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_app_components(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_app_components( + test_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_server_metrics_config( + test_id="str", + body={ + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_app_components(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_app_components( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_server_metrics_config( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test_file(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_test_files(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_files( + test_id="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_tests(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_tests() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test_file(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_test_profile( + test_profile_id="str", + body={ + "testProfileId": "str", + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "targetResourceConfigurations": "target_resource_configurations", + "targetResourceId": "str", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_test_profiles(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_profiles() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py new file mode 100644 index 000000000000..895274de2057 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import LoadTestRunClientTestBase, LoadTestRunPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestRun(LoadTestRunClientTestBase): + @LoadTestRunPreparer() + @recorded_by_proxy + def test_create_or_update_app_components(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.create_or_update_app_components( + test_run_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_create_or_update_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.create_or_update_server_metrics_config( + test_run_id="str", + body={ + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_delete_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.delete_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_app_components(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_app_components( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_server_metrics_config( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_run_file(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_run_file( + test_run_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... 
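
As in the administration tests, each stub returns the raw response and defers verification. One way the get_test_run placeholder could be made concrete, assuming the TestRun model exposes test_run_id and status attributes mirroring its testRunId/status REST fields (the TestRunStatus enum is added in microsoft/loadtestservice/models/_enums.py further down in this patch):

    from microsoft.loadtestservice.models import TestRunStatus

    @LoadTestRunPreparer()
    @recorded_by_proxy
    def test_get_test_run_checked(self, loadtestrun_endpoint):
        client = self.create_client(endpoint=loadtestrun_endpoint)
        # "my-existing-run" is a hypothetical, previously recorded test run id
        response = client.get_test_run(test_run_id="my-existing-run")
        assert response.test_run_id == "my-existing-run"
        # TestRunStatus is a case-insensitive str enum, so a plain membership
        # check against its values is enough here
        assert response.status in [s.value for s in TestRunStatus]
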
+ + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_metric_dimension_values(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_metric_dimension_values( + test_run_id="str", + name="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_metric_definitions(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_metric_definitions( + test_run_id="str", + metric_namespace="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_metric_namespaces(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_metric_namespaces( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_metrics(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_metrics( + test_run_id="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_test_runs(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_test_runs() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_stop_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.stop_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_delete_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.delete_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_test_profile_runs(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_test_profile_runs() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_stop_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.stop_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
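
The metrics stubs above (get_metric_namespaces, get_metric_definitions, list_metrics) are three steps of one workflow: discover a namespace, pick a metric definition from it, then query the time series. A sketch of that chain outside the test harness, assuming the client is importable from customizations as the test preparers below do, and that time_interval takes an ISO 8601 start/end pair (the "str" placeholders in the stubs do not show the expected format):

    from azure.identity import DefaultAzureCredential
    from customizations import LoadTestRunClient

    # hypothetical endpoint and completed test run
    client = LoadTestRunClient(
        endpoint="https://myresource.loadtest.azure.com",
        credential=DefaultAzureCredential(),
    )
    run_id = "my-existing-run"

    # MetricNamespaceCollection and MetricDefinitionCollection (defined later in
    # this patch) both expose their items through a required `value` list
    namespace = client.get_metric_namespaces(test_run_id=run_id).value[0].name
    metric = client.get_metric_definitions(test_run_id=run_id, metric_namespace=namespace).value[0].name

    series = client.list_metrics(
        test_run_id=run_id,
        metric_name=metric,
        metric_namespace=namespace,
        time_interval="2020-02-20T00:00:00Z/2020-02-20T01:00:00Z",  # assumed start/end format
    )
    for element in series:  # the generated stubs iterate the response the same way
        print(element)
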
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py new file mode 100644 index 000000000000..963c78291566 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py @@ -0,0 +1,243 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import LoadTestRunPreparer +from testpreparer_async import LoadTestRunClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestRunAsync(LoadTestRunClientTestBaseAsync): + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_create_or_update_app_components(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.create_or_update_app_components( + test_run_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_create_or_update_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.create_or_update_server_metrics_config( + test_run_id="str", + body={ + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_delete_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.delete_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_app_components(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_app_components( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
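
The create_or_update_app_components stubs pass a raw dict keyed by resource id. The same payload can be built from the typed models this patch adds; a sketch under the assumption that TestRunAppComponents accepts a components mapping shaped like the raw body above (AppComponent's keyword constructor appears later in _models.py, where resource_id is read-only and so serves only as the map key):

    from microsoft.loadtestservice.models import AppComponent, TestRunAppComponents

    @LoadTestRunPreparer()
    @recorded_by_proxy_async
    async def test_create_or_update_app_components_typed(self, loadtestrun_endpoint):
        client = self.create_async_client(endpoint=loadtestrun_endpoint)
        # hypothetical fully qualified Azure resource id, used as the map key
        resource_id = (
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/my-rg/providers/Microsoft.Web/sites/my-web-app"
        )
        body = TestRunAppComponents(
            components={
                resource_id: AppComponent(
                    resource_name="my-web-app",
                    resource_type="Microsoft.Web/sites",
                    display_name="My web app",
                )
            }
        )
        # generated operations typically accept either the model or its dict form
        response = await client.create_or_update_app_components(test_run_id="my-run", body=body)
        assert response is not None
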
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_server_metrics_config( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_run_file(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_run_file( + test_run_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_metric_dimension_values(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_metric_dimension_values( + test_run_id="str", + name="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_metric_definitions(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_metric_definitions( + test_run_id="str", + metric_namespace="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_metric_namespaces(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_metric_namespaces( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_metrics(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_metrics( + test_run_id="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_test_runs(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_test_runs() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_stop_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.stop_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_delete_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.delete_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
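
None of the generated async stubs release their clients; the async test base simply constructs one per test. In hand-written tests or samples, the usual azure-core lifetime pattern applies (assumed here, since the client implementation itself is not part of this diff): the async clients should support "async with" or an explicit close() coroutine.

    @LoadTestRunPreparer()
    @recorded_by_proxy_async
    async def test_stop_test_profile_run_closed(self, loadtestrun_endpoint):
        client = self.create_async_client(endpoint=loadtestrun_endpoint)
        try:
            response = await client.stop_test_profile_run(
                test_profile_run_id="my-profile-run",  # hypothetical in-flight run
            )
            assert response is not None
        finally:
            # release the underlying async transport; equivalent to wrapping
            # the calls above in "async with client:"
            await client.close()
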
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_test_profile_runs(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_test_profile_runs() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_stop_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.stop_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py new file mode 100644 index 000000000000..a3c665485dac --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from customizations import LoadTestAdministrationClient, LoadTestRunClient +from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer +import functools + + +class LoadTestAdministrationClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(LoadTestAdministrationClient) + return self.create_client_from_credential( + LoadTestAdministrationClient, + credential=credential, + endpoint=endpoint, + ) + + +LoadTestAdministrationPreparer = functools.partial( + PowerShellPreparer, + "loadtestadministration", + loadtestadministration_endpoint="https://fake_loadtestadministration_endpoint.com", +) + + +class LoadTestRunClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(LoadTestRunClient) + return self.create_client_from_credential( + LoadTestRunClient, + credential=credential, + endpoint=endpoint, + ) + + +LoadTestRunPreparer = functools.partial( + PowerShellPreparer, "loadtestrun", loadtestrun_endpoint="https://fake_loadtestrun_endpoint.com" +) diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py new file mode 100644 index 000000000000..b02b742c2a01 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from customizations.aio import LoadTestAdministrationClient, LoadTestRunClient +from devtools_testutils import AzureRecordedTestCase + + +class LoadTestAdministrationClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(LoadTestAdministrationClient, is_async=True) + return self.create_client_from_credential( + LoadTestAdministrationClient, + credential=credential, + endpoint=endpoint, + ) + + +class LoadTestRunClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(LoadTestRunClient, is_async=True) + return self.create_client_from_credential( + LoadTestRunClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py new file mode 100644 index 000000000000..6e644e25a518 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py @@ -0,0 +1,156 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + AppComponent, + ArtifactsContainerInfo, + AutoStopCriteria, + CertificateMetadata, + DimensionFilter, + DimensionValue, + ErrorDetails, + FunctionFlexConsumptionResourceConfiguration, + FunctionFlexConsumptionTargetResourceConfigurations, + LoadTestConfiguration, + MetricAvailability, + MetricDefinition, + MetricDefinitionCollection, + MetricNamespace, + MetricNamespaceCollection, + MetricRequestPayload, + MetricValue, + NameAndDescription, + OptionalLoadTestConfiguration, + PassFailCriteria, + PassFailMetric, + PassFailServerMetric, + RegionalConfiguration, + ResourceMetric, + Secret, + TargetResourceConfigurations, + Test, + TestAppComponents, + TestFileInfo, + TestInputArtifacts, + TestProfile, + TestProfileRun, + TestProfileRunRecommendation, + TestRun, + TestRunAppComponents, + TestRunArtifacts, + TestRunDetail, + TestRunFileInfo, + TestRunInputArtifacts, + TestRunOutputArtifacts, + TestRunServerMetricsConfiguration, + TestRunStatistics, + TestServerMetricsConfiguration, + TimeSeriesElement, +) + +from ._enums import ( # type: ignore + Aggregation, + CertificateType, + CreatedByType, + FileType, + FileValidationStatus, + ManagedIdentityType, + MetricUnit, + PFMetrics, + PassFailAction, + PassFailAggregationFunction, + PassFailResult, + PassFailTestResult, + RecommendationCategory, + RequestDataLevel, + ResourceKind, + SecretType, + TestKind, + TestProfileRunStatus, + TestRunStatus, + TimeGrain, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AppComponent", + "ArtifactsContainerInfo", + "AutoStopCriteria", + "CertificateMetadata", + "DimensionFilter", + "DimensionValue", + "ErrorDetails", + "FunctionFlexConsumptionResourceConfiguration", + "FunctionFlexConsumptionTargetResourceConfigurations", + "LoadTestConfiguration", + "MetricAvailability", + "MetricDefinition", + "MetricDefinitionCollection", + "MetricNamespace", + "MetricNamespaceCollection", + "MetricRequestPayload", + "MetricValue", + "NameAndDescription", + "OptionalLoadTestConfiguration", + "PassFailCriteria", + "PassFailMetric", + "PassFailServerMetric", + "RegionalConfiguration", + "ResourceMetric", + "Secret", + "TargetResourceConfigurations", + "Test", + "TestAppComponents", + "TestFileInfo", + "TestInputArtifacts", + "TestProfile", + "TestProfileRun", + "TestProfileRunRecommendation", + "TestRun", + "TestRunAppComponents", + "TestRunArtifacts", + "TestRunDetail", + "TestRunFileInfo", + "TestRunInputArtifacts", + "TestRunOutputArtifacts", + "TestRunServerMetricsConfiguration", + "TestRunStatistics", + "TestServerMetricsConfiguration", + "TimeSeriesElement", + "Aggregation", + "CertificateType", + "CreatedByType", + "FileType", + "FileValidationStatus", + "ManagedIdentityType", + "MetricUnit", + "PFMetrics", + "PassFailAction", + "PassFailAggregationFunction", + "PassFailResult", + "PassFailTestResult", + "RecommendationCategory", + "RequestDataLevel", + "ResourceKind", + "SecretType", + "TestKind", + "TestProfileRunStatus", + "TestRunStatus", + "TimeGrain", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py 
b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py new file mode 100644 index 000000000000..f6ecc0080712 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py @@ -0,0 +1,317 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class Aggregation(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Aggregation type.""" + + AVERAGE = "Average" + """Average value.""" + COUNT = "Count" + """Total count.""" + NONE = "None" + """Aggregation will be average in this case.""" + TOTAL = "Total" + """Total sum.""" + PERCENTILE75 = "Percentile75" + """75th percentile.""" + PERCENTILE90 = "Percentile90" + """90th percentile.""" + PERCENTILE95 = "Percentile95" + """95th percentile.""" + PERCENTILE96 = "Percentile96" + """96th percentile.""" + PERCENTILE97 = "Percentile97" + """97th percentile.""" + PERCENTILE98 = "Percentile98" + """98th percentile.""" + PERCENTILE99 = "Percentile99" + """99th percentile.""" + PERCENTILE999 = "Percentile999" + """99.9th percentile.""" + PERCENTILE9999 = "Percentile9999" + """99.99th percentile.""" + + +class CertificateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Types of certificates supported.""" + + KEY_VAULT_CERTIFICATE_URI = "AKV_CERT_URI" + """If the certificate is stored in an Azure Key Vault.""" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the entity that created the test run. (E.x. 
User, ScheduleTrigger, etc).""" + + USER = "User" + """Entity was created by a user.""" + SCHEDULED_TRIGGER = "ScheduledTrigger" + """Entity was created by a scheduled trigger.""" + + +class FileType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Types of file supported.""" + + JMX_FILE = "JMX_FILE" + """If the file is a JMX script.""" + USER_PROPERTIES = "USER_PROPERTIES" + """If the file is a user properties file.""" + ADDITIONAL_ARTIFACTS = "ADDITIONAL_ARTIFACTS" + """If the file is not among any of the other supported file types.""" + ZIPPED_ARTIFACTS = "ZIPPED_ARTIFACTS" + """If the file is a compressed archive containing a collection of various artifacts or resources.""" + URL_TEST_CONFIG = "URL_TEST_CONFIG" + """If the file is a JSON config file to define the requests for a URL test.""" + TEST_SCRIPT = "TEST_SCRIPT" + """If the file is a test script.""" + + +class FileValidationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """File status.""" + + NOT_VALIDATED = "NOT_VALIDATED" + """File is not validated.""" + VALIDATION_SUCCESS = "VALIDATION_SUCCESS" + """File is validated.""" + VALIDATION_FAILURE = "VALIDATION_FAILURE" + """File validation is failed.""" + VALIDATION_INITIATED = "VALIDATION_INITIATED" + """File validation is in progress.""" + VALIDATION_NOT_REQUIRED = "VALIDATION_NOT_REQUIRED" + """Validation is not required.""" + + +class ManagedIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Managed identity type.""" + + SYSTEM_ASSIGNED = "SystemAssigned" + """System-assigned managed identity.""" + USER_ASSIGNED = "UserAssigned" + """User-assigned managed identity.""" + + +class MetricUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metric unit.""" + + NOT_SPECIFIED = "NotSpecified" + """No unit specified.""" + PERCENT = "Percent" + """Percentage.""" + COUNT = "Count" + """Value count.""" + SECONDS = "Seconds" + """Seconds.""" + MILLISECONDS = "Milliseconds" + """Milliseconds""" + BYTES = "Bytes" + """Bytes""" + BYTES_PER_SECOND = "BytesPerSecond" + """Bytes per second""" + COUNT_PER_SECOND = "CountPerSecond" + """Count per second""" + + +class PassFailAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Action to take on failure of pass/fail criteria.""" + + CONTINUE = "continue" + """Test will continue to run even if pass fail metric criteria metric gets failed.""" + STOP = "stop" + """Test run will stop if pass fail criteria metric is not passed.""" + + +class PassFailAggregationFunction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Aggregation functions for pass/fail criteria.""" + + COUNT = "count" + """Criteria applies for count value.""" + PERCENTAGE = "percentage" + """Criteria applies for given percentage value.""" + AVERAGE = "avg" + """Criteria applies for avg value.""" + PERCENTILE50 = "p50" + """Criteria applies for 50th percentile value.""" + PERCENTILE75 = "p75" + """Criteria applies for 75th percentile value.""" + PERCENTILE90 = "p90" + """Criteria applies for 90th percentile value.""" + PERCENTILE95 = "p95" + """Criteria applies for 95th percentile value.""" + PERCENTILE96 = "p96" + """Criteria applies for 96th percentile value.""" + PERCENTILE97 = "p97" + """Criteria applies for 97th percentile value.""" + PERCENTILE98 = "p98" + """Criteria applies for 98th percentile value.""" + PERCENTILE99 = "p99" + """Criteria applies for 99th percentile value.""" + PERCENTILE999 = "p99.9" + """Criteria applies for 99.9th percentile value.""" + PERCENTILE9999 = "p99.99" + """Criteria applies for 99.99th percentile value.""" 
+ MINIMUM = "min" + """Criteria applies for minimum value.""" + MAXIMUM = "max" + """Criteria applies for maximum value.""" + + +class PassFailResult(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Pass/fail criteria result.""" + + PASSED = "passed" + """Given pass fail criteria metric has passed.""" + UNDETERMINED = "undetermined" + """Given pass fail criteria metric couldn't determine.""" + FAILED = "failed" + """Given pass fail criteria metric has failed.""" + + +class PassFailTestResult(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test result based on pass/fail criteria.""" + + PASSED = "PASSED" + """Pass/fail criteria has passed.""" + NOT_APPLICABLE = "NOT_APPLICABLE" + """Pass/fail criteria is not applicable.""" + FAILED = "FAILED" + """Pass/fail criteria has failed.""" + + +class PFMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metrics for pass/fail criteria.""" + + RESPONSE_TIME_IN_MILLISECONDS = "response_time_ms" + """Pass fail criteria for response time metric in milliseconds.""" + LATENCY = "latency" + """Pass fail criteria for latency metric in milliseconds.""" + ERROR = "error" + """Pass fail criteria for error metric.""" + REQUESTS = "requests" + """Pass fail criteria for total requests.""" + REQUESTS_PER_SECOND = "requests_per_sec" + """Pass fail criteria for request per second.""" + + +class RecommendationCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of Recommendation.""" + + THROUGHPUT_OPTIMIZED = "ThroughputOptimized" + """The recommendation for this category optimizes the throughput/RPS (Requests per Second) of the + app.""" + COST_OPTIMIZED = "CostOptimized" + """The recommendation for this category optimizes the cost of the app.""" + + +class RequestDataLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Request data collection level for test run.""" + + NONE = "NONE" + """No request data will be collected""" + ERRORS = "ERRORS" + """Request data will be collected in case of failed requests""" + + +class ResourceKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Kind of the resource on which test profile is created.""" + + FUNCTIONS_FLEX_CONSUMPTION = "FunctionsFlexConsumption" + """Resource is a Azure FunctionApp on Flex Consumption Plan.""" + + +class SecretType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Types of secrets supported.""" + + KEY_VAULT_SECRET_URI = "AKV_SECRET_URI" + """If the secret is stored in an Azure Key Vault.""" + SECRET_VALUE = "SECRET_VALUE" + """If the secret value provided as plain text.""" + + +class TestKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test kind.""" + + URL = "URL" + """URL Test""" + JMX = "JMX" + """JMX Test""" + LOCUST = "Locust" + """Locust Test""" + + +class TestProfileRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test profile run status.""" + + ACCEPTED = "ACCEPTED" + """Test profile run request is accepted.""" + NOT_STARTED = "NOTSTARTED" + """Test profile run is not yet started.""" + EXECUTING = "EXECUTING" + """Test profile run has started executing.""" + DONE = "DONE" + """Test profile run has completed successfully.""" + CANCELLING = "CANCELLING" + """Test profile run is being cancelled.""" + CANCELLED = "CANCELLED" + """Test profile run is cancelled.""" + FAILED = "FAILED" + """Test profile run has failed.""" + + +class TestRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test run status.""" + + ACCEPTED = "ACCEPTED" + """Test run request is accepted.""" + NOT_STARTED = "NOTSTARTED" + """Test run is not yet 
started.""" + PROVISIONING = "PROVISIONING" + """Test run is provisioning.""" + PROVISIONED = "PROVISIONED" + """Test run is provisioned.""" + CONFIGURING = "CONFIGURING" + """Test run is getting configured.""" + CONFIGURED = "CONFIGURED" + """Test run configuration is done.""" + EXECUTING = "EXECUTING" + """Test run has started executing.""" + EXECUTED = "EXECUTED" + """Test run execution is completed.""" + DEPROVISIONING = "DEPROVISIONING" + """Test run is getting deprovisioned.""" + DEPROVISIONED = "DEPROVISIONED" + """Test run is deprovisioned.""" + DONE = "DONE" + """Test run is completed.""" + CANCELLING = "CANCELLING" + """Test run is being cancelled.""" + CANCELLED = "CANCELLED" + """Test run request is cancelled.""" + FAILED = "FAILED" + """Test run request is failed.""" + VALIDATION_SUCCESS = "VALIDATION_SUCCESS" + """Test run JMX file is validated.""" + VALIDATION_FAILURE = "VALIDATION_FAILURE" + """Test run JMX file validation is failed.""" + + +class TimeGrain(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Time Grain.""" + + PT5S = "PT5S" + """5 seconds, available only if test run duration is less than 10 minutes.""" + PT10S = "PT10S" + """10 seconds, available only if test run duration is less than 10 minutes.""" + PT1M = "PT1M" + """1 minute""" + PT5M = "PT5M" + """5 minutes, available only if test run duration is greater than 1 minute.""" + PT1H = "PT1H" + """1 hour, available only if test run duration is greater than 1 minute.""" diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py new file mode 100644 index 000000000000..ff66baac031f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py @@ -0,0 +1,2605 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from ....customizations._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import ResourceKind + +if TYPE_CHECKING: + from .. import models as _models + + +class AppComponent(_Model): + """An Azure resource object (Refer azure generic resource model + :`https://learn.microsoft.com/en-us/rest/api/resources/resources/get-by-id#genericresource + `_). + + :ivar resource_id: fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + Required. + :vartype resource_id: str + :ivar resource_name: Azure resource name, required while creating the app component. Required. + :vartype resource_name: str + :ivar resource_type: Azure resource type, required while creating the app component. Required. + :vartype resource_type: str + :ivar display_name: Azure resource display name. + :vartype display_name: str + :ivar resource_group: Resource group name of the Azure resource. 
+ :vartype resource_group: str + :ivar subscription_id: Subscription Id of the Azure resource. + :vartype subscription_id: str + :ivar kind: Kind of Azure resource type. + :vartype kind: str + """ + + resource_id: str = rest_field(name="resourceId", visibility=["read"]) + """fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + Required.""" + resource_name: str = rest_field(name="resourceName", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource name, required while creating the app component. Required.""" + resource_type: str = rest_field(name="resourceType", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource type, required while creating the app component. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource display name.""" + resource_group: Optional[str] = rest_field(name="resourceGroup", visibility=["read"]) + """Resource group name of the Azure resource.""" + subscription_id: Optional[str] = rest_field(name="subscriptionId", visibility=["read"]) + """Subscription Id of the Azure resource.""" + kind: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Kind of Azure resource type.""" + + @overload + def __init__( + self, + *, + resource_name: str, + resource_type: str, + display_name: Optional[str] = None, + kind: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ArtifactsContainerInfo(_Model): + """Artifacts container info. + + :ivar url: This is a SAS URI to an Azure Storage Container that contains the test run + artifacts. + :vartype url: str + :ivar expire_date_time: Expiry time of the container (RFC 3339 literal format). + :vartype expire_date_time: ~datetime.datetime + """ + + url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is a SAS URI to an Azure Storage Container that contains the test run artifacts.""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Expiry time of the container (RFC 3339 literal format).""" + + @overload + def __init__( + self, + *, + url: Optional[str] = None, + expire_date_time: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AutoStopCriteria(_Model): + """Auto stop criteria for a test. This will automatically stop a load test if the error percentage + is high for a certain time window. + + :ivar auto_stop_disabled: Whether auto-stop should be disabled. The default value is false. + :vartype auto_stop_disabled: bool + :ivar error_rate: Threshold percentage of errors on which test run should be automatically + stopped. Allowed values are in range of 0.0-100.0. 
+ :vartype error_rate: float + :ivar error_rate_time_window_in_seconds: Time window during which the error percentage should + be evaluated in seconds. + :vartype error_rate_time_window_in_seconds: int + """ + + auto_stop_disabled: Optional[bool] = rest_field( + name="autoStopDisabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Whether auto-stop should be disabled. The default value is false.""" + error_rate: Optional[float] = rest_field( + name="errorRate", visibility=["read", "create", "update", "delete", "query"] + ) + """Threshold percentage of errors on which test run should be automatically stopped. Allowed + values are in range of 0.0-100.0.""" + error_rate_time_window_in_seconds: Optional[int] = rest_field( + name="errorRateTimeWindowInSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Time window during which the error percentage should be evaluated in seconds.""" + + @overload + def __init__( + self, + *, + auto_stop_disabled: Optional[bool] = None, + error_rate: Optional[float] = None, + error_rate_time_window_in_seconds: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertificateMetadata(_Model): + """Certificates metadata. + + :ivar value: The value of the certificate for respective type. + :vartype value: str + :ivar type: Type of certificate. "AKV_CERT_URI" + :vartype type: str or ~microsoft.loadtestservice.models.CertificateType + :ivar name: Name of the certificate. + :vartype name: str + """ + + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the certificate for respective type.""" + type: Optional[Union[str, "_models.CertificateType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of certificate. \"AKV_CERT_URI\"""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the certificate.""" + + @overload + def __init__( + self, + *, + value: Optional[str] = None, + type: Optional[Union[str, "_models.CertificateType"]] = None, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DimensionFilter(_Model): + """Dimension name and values to filter. + + :ivar name: The dimension name. + :vartype name: str + :ivar values_property: The dimension values. Maximum values can be 20. + :vartype values_property: list[str] + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The dimension name.""" + values_property: Optional[List[str]] = rest_field( + name="values", visibility=["read", "create", "update", "delete", "query"] + ) + """The dimension values. Maximum values can be 20.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + values_property: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DimensionValue(_Model): + """Represents a metric dimension value. + + :ivar name: The name of the dimension. + :vartype name: str + :ivar value: The value of the dimension. + :vartype value: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the dimension.""" + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the dimension.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + value: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorDetails(_Model): + """Error details if there is any failure in load test run. + + :ivar message: Error details in case test run was not successfully run. + :vartype message: str + """ + + message: Optional[str] = rest_field(visibility=["read"]) + """Error details in case test run was not successfully run.""" + + +class FunctionFlexConsumptionResourceConfiguration(_Model): # pylint: disable=name-too-long + """Resource configuration instance for a Flex Consumption based Azure Function App. + + :ivar instance_memory_mb: Memory size of the instance. Supported values are 2048, 4096. + Required. + :vartype instance_memory_mb: int + :ivar http_concurrency: HTTP Concurrency for the function app. + :vartype http_concurrency: int + """ + + instance_memory_mb: int = rest_field( + name="instanceMemoryMB", visibility=["read", "create", "update", "delete", "query"] + ) + """Memory size of the instance. Supported values are 2048, 4096. Required.""" + http_concurrency: Optional[int] = rest_field( + name="httpConcurrency", visibility=["read", "create", "update", "delete", "query"] + ) + """HTTP Concurrency for the function app.""" + + @overload + def __init__( + self, + *, + instance_memory_mb: int, + http_concurrency: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TargetResourceConfigurations(_Model): + """Configurations of a target resource. This varies with the kind of resource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FunctionFlexConsumptionTargetResourceConfigurations + + :ivar kind: Kind of the resource for which the configurations apply. Required. + "FunctionsFlexConsumption" + :vartype kind: str or ~microsoft.loadtestservice.models.ResourceKind + """ + + __mapping__: Dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create"]) + """Kind of the resource for which the configurations apply. Required. \"FunctionsFlexConsumption\"""" + + @overload + def __init__( + self, + *, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FunctionFlexConsumptionTargetResourceConfigurations( + TargetResourceConfigurations, discriminator="FunctionsFlexConsumption" +): # pylint: disable=name-too-long + """Configurations for a Function App using Flex Consumption Plan. + + :ivar kind: The kind value to use when providing configuration. + This should typically be not changed from its value. Required. Resource is a Azure FunctionApp + on Flex Consumption Plan. + :vartype kind: str or ~microsoft.loadtestservice.models.FUNCTIONS_FLEX_CONSUMPTION + :ivar configurations: A map of configurations for a Function app using Flex Consumption Plan. + :vartype configurations: dict[str, + ~microsoft.loadtestservice.models.FunctionFlexConsumptionResourceConfiguration] + """ + + kind: Literal[ResourceKind.FUNCTIONS_FLEX_CONSUMPTION] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind value to use when providing configuration. + This should typically be not changed from its value. Required. Resource is a Azure FunctionApp + on Flex Consumption Plan.""" + configurations: Optional[Dict[str, "_models.FunctionFlexConsumptionResourceConfiguration"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A map of configurations for a Function app using Flex Consumption Plan.""" + + @overload + def __init__( + self, + *, + configurations: Optional[Dict[str, "_models.FunctionFlexConsumptionResourceConfiguration"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, kind=ResourceKind.FUNCTIONS_FLEX_CONSUMPTION, **kwargs) + + +class LoadTestConfiguration(_Model): + """Configurations for the load test. + + :ivar engine_instances: The number of engine instances to execute load test. Supported values + are in range of 1-400. Required for creating a new test. + :vartype engine_instances: int + :ivar split_all_csvs: If false, Azure Load Testing copies and processes your input files + unmodified + across all test engine instances. If true, Azure Load Testing splits the CSV + input data evenly across all engine instances. If you provide multiple CSV + files, each file will be split evenly. + :vartype split_all_csvs: bool + :ivar quick_start_test: If true, optionalLoadTestConfig is required and JMX script for the load + test is + not required to upload. + :vartype quick_start_test: bool + :ivar optional_load_test_config: Configuration for quick load test. + :vartype optional_load_test_config: + ~microsoft.loadtestservice.models.OptionalLoadTestConfiguration + :ivar regional_load_test_config: Region distribution configuration for the load test. + :vartype regional_load_test_config: + list[~microsoft.loadtestservice.models.RegionalConfiguration] + """ + + engine_instances: Optional[int] = rest_field( + name="engineInstances", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of engine instances to execute load test. Supported values are in range of 1-400. 
+ Required for creating a new test.""" + split_all_csvs: Optional[bool] = rest_field( + name="splitAllCSVs", visibility=["read", "create", "update", "delete", "query"] + ) + """If false, Azure Load Testing copies and processes your input files unmodified + across all test engine instances. If true, Azure Load Testing splits the CSV + input data evenly across all engine instances. If you provide multiple CSV + files, each file will be split evenly.""" + quick_start_test: Optional[bool] = rest_field( + name="quickStartTest", visibility=["read", "create", "update", "delete", "query"] + ) + """If true, optionalLoadTestConfig is required and JMX script for the load test is + not required to upload.""" + optional_load_test_config: Optional["_models.OptionalLoadTestConfiguration"] = rest_field( + name="optionalLoadTestConfig", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration for quick load test.""" + regional_load_test_config: Optional[List["_models.RegionalConfiguration"]] = rest_field( + name="regionalLoadTestConfig", visibility=["read", "create", "update", "delete", "query"] + ) + """Region distribution configuration for the load test.""" + + @overload + def __init__( + self, + *, + engine_instances: Optional[int] = None, + split_all_csvs: Optional[bool] = None, + quick_start_test: Optional[bool] = None, + optional_load_test_config: Optional["_models.OptionalLoadTestConfiguration"] = None, + regional_load_test_config: Optional[List["_models.RegionalConfiguration"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricAvailability(_Model): + """Metric availability specifies the time grain (aggregation interval or frequency). + + :ivar time_grain: The time grain specifies the aggregation interval for the metric. Expressed + as + a duration 'PT1M', 'PT1H', etc. Known values are: "PT5S", "PT10S", "PT1M", "PT5M", and "PT1H". + :vartype time_grain: str or ~microsoft.loadtestservice.models.TimeGrain + """ + + time_grain: Optional[Union[str, "_models.TimeGrain"]] = rest_field( + name="timeGrain", visibility=["read", "create", "update", "delete", "query"] + ) + """The time grain specifies the aggregation interval for the metric. Expressed as + a duration 'PT1M', 'PT1H', etc. Known values are: \"PT5S\", \"PT10S\", \"PT1M\", \"PT5M\", and + \"PT1H\".""" + + @overload + def __init__( + self, + *, + time_grain: Optional[Union[str, "_models.TimeGrain"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricDefinition(_Model): + """Metric definition. + + :ivar dimensions: List of dimensions. + :vartype dimensions: list[~microsoft.loadtestservice.models.NameAndDescription] + :ivar description: The metric description. + :vartype description: str + :ivar name: The metric name. + :vartype name: str + :ivar namespace: The namespace the metric belongs to. + :vartype namespace: str + :ivar primary_aggregation_type: The primary aggregation type value defining how to use the + values for display. 
Known values are: "Average", "Count", "None", "Total", "Percentile75", + "Percentile90", "Percentile95", "Percentile96", "Percentile97", "Percentile98", "Percentile99", + "Percentile999", and "Percentile9999". + :vartype primary_aggregation_type: str or ~microsoft.loadtestservice.models.Aggregation + :ivar supported_aggregation_types: The collection of what all aggregation types are supported. + :vartype supported_aggregation_types: list[str] + :ivar unit: The unit of the metric. Known values are: "NotSpecified", "Percent", "Count", + "Seconds", "Milliseconds", "Bytes", "BytesPerSecond", and "CountPerSecond". + :vartype unit: str or ~microsoft.loadtestservice.models.MetricUnit + :ivar metric_availabilities: Metric availability specifies the time grain (aggregation interval + or + frequency). + :vartype metric_availabilities: list[~microsoft.loadtestservice.models.MetricAvailability] + """ + + dimensions: Optional[List["_models.NameAndDescription"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of dimensions.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric description.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric name.""" + namespace: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace the metric belongs to.""" + primary_aggregation_type: Optional[Union[str, "_models.Aggregation"]] = rest_field( + name="primaryAggregationType", visibility=["read", "create", "update", "delete", "query"] + ) + """The primary aggregation type value defining how to use the values for display. Known values + are: \"Average\", \"Count\", \"None\", \"Total\", \"Percentile75\", \"Percentile90\", + \"Percentile95\", \"Percentile96\", \"Percentile97\", \"Percentile98\", \"Percentile99\", + \"Percentile999\", and \"Percentile9999\".""" + supported_aggregation_types: Optional[List[str]] = rest_field( + name="supportedAggregationTypes", visibility=["read", "create", "update", "delete", "query"] + ) + """The collection of what all aggregation types are supported.""" + unit: Optional[Union[str, "_models.MetricUnit"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The unit of the metric. Known values are: \"NotSpecified\", \"Percent\", \"Count\", + \"Seconds\", \"Milliseconds\", \"Bytes\", \"BytesPerSecond\", and \"CountPerSecond\".""" + metric_availabilities: Optional[List["_models.MetricAvailability"]] = rest_field( + name="metricAvailabilities", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric availability specifies the time grain (aggregation interval or + frequency).""" + + @overload + def __init__( + self, + *, + dimensions: Optional[List["_models.NameAndDescription"]] = None, + description: Optional[str] = None, + name: Optional[str] = None, + namespace: Optional[str] = None, + primary_aggregation_type: Optional[Union[str, "_models.Aggregation"]] = None, + supported_aggregation_types: Optional[List[str]] = None, + unit: Optional[Union[str, "_models.MetricUnit"]] = None, + metric_availabilities: Optional[List["_models.MetricAvailability"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class MetricDefinitionCollection(_Model):
+    """Represents a collection of metric definitions.
+
+    :ivar value: The values for the metric definitions. Required.
+    :vartype value: list[~microsoft.loadtestservice.models.MetricDefinition]
+    """
+
+    value: List["_models.MetricDefinition"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The values for the metric definitions. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        value: List["_models.MetricDefinition"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class MetricNamespace(_Model):
+    """Metric namespace class specifies the metadata for a metric namespace.
+
+    :ivar description: The namespace description.
+    :vartype description: str
+    :ivar name: The metric namespace name.
+    :vartype name: str
+    """
+
+    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The namespace description."""
+    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The metric namespace name."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class MetricNamespaceCollection(_Model):
+    """Represents a collection of metric namespaces.
+
+    :ivar value: The values for the metric namespaces. Required.
+    :vartype value: list[~microsoft.loadtestservice.models.MetricNamespace]
+    """
+
+    value: List["_models.MetricNamespace"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The values for the metric namespaces. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        value: List["_models.MetricNamespace"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class MetricRequestPayload(_Model):
+    """Filters to fetch the set of metrics.
+
+    :ivar filters: Get metrics for specific dimension values. Example: a metric contains
+     dimensions like SamplerName and Error. To retrieve all the time series data where SamplerName
+     equals HTTPRequest1 or HTTPRequest2, the DimensionFilter value will be
+     {"SamplerName", ["HTTPRequest1", "HTTPRequest2"]}.
+    :vartype filters: list[~microsoft.loadtestservice.models.DimensionFilter]
+    """
+
+    filters: Optional[List["_models.DimensionFilter"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Get metrics for specific dimension values. Example: a metric contains
+     dimensions like SamplerName and Error.
+     To retrieve all the time series data where SamplerName
+     equals HTTPRequest1 or HTTPRequest2, the DimensionFilter value will be
+     {\"SamplerName\", [\"HTTPRequest1\", \"HTTPRequest2\"]}."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        filters: Optional[List["_models.DimensionFilter"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class MetricValue(_Model):
+    """Represents a metric value.
+
+    :ivar timestamp: The timestamp for the metric value in RFC 3339 format.
+    :vartype timestamp: ~datetime.datetime
+    :ivar value: The metric value.
+    :vartype value: float
+    """
+
+    timestamp: Optional[datetime.datetime] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
+    )
+    """The timestamp for the metric value in RFC 3339 format."""
+    value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The metric value."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        timestamp: Optional[datetime.datetime] = None,
+        value: Optional[float] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class NameAndDescription(_Model):
+    """The name and description.
+
+    :ivar description: The description.
+    :vartype description: str
+    :ivar name: The name.
+    :vartype name: str
+    """
+
+    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The description."""
+    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
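A sketch of the filters payload described above. The inner name/values shape of DimensionFilter is not shown in this patch, so the raw-JSON mapping below is an assumption based on the docstring example:

from microsoft.loadtestservice import models as lt_models

# All time series where SamplerName equals HTTPRequest1 or HTTPRequest2.
payload = lt_models.MetricRequestPayload(
    {"filters": [{"name": "SamplerName", "values": ["HTTPRequest1", "HTTPRequest2"]}]}
)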
+
+
+class OptionalLoadTestConfiguration(_Model):
+    """Configuration for quick load test.
+
+    :ivar endpoint_url: Test URL. Provide the complete HTTP URL. For example,
+     https://contoso-app.azurewebsites.net/login.
+    :vartype endpoint_url: str
+    :ivar requests_per_second: Target throughput (requests per second). This may not necessarily
+     be achieved. The actual throughput will be lower if the application is not capable of
+     handling it.
+    :vartype requests_per_second: int
+    :ivar max_response_time_in_ms: Maximum response time in milliseconds of the API/endpoint.
+    :vartype max_response_time_in_ms: int
+    :ivar virtual_users: Number of concurrent virtual users.
+    :vartype virtual_users: int
+    :ivar ramp_up_time: Ramp up time in seconds.
+    :vartype ramp_up_time: int
+    :ivar duration: Test run duration in seconds.
+    :vartype duration: int
+    """
+
+    endpoint_url: Optional[str] = rest_field(
+        name="endpointUrl", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Test URL. Provide the complete HTTP URL. For example,
+     https://contoso-app.azurewebsites.net/login."""
+    requests_per_second: Optional[int] = rest_field(
+        name="requestsPerSecond", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Target throughput (requests per second). This may not necessarily be achieved. The actual
+     throughput will be lower if the application is not capable of handling it."""
+    max_response_time_in_ms: Optional[int] = rest_field(
+        name="maxResponseTimeInMs", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Maximum response time in milliseconds of the API/endpoint."""
+    virtual_users: Optional[int] = rest_field(
+        name="virtualUsers", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Number of concurrent virtual users."""
+    ramp_up_time: Optional[int] = rest_field(
+        name="rampUpTime", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Ramp up time in seconds."""
+    duration: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Test run duration in seconds."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        endpoint_url: Optional[str] = None,
+        requests_per_second: Optional[int] = None,
+        max_response_time_in_ms: Optional[int] = None,
+        virtual_users: Optional[int] = None,
+        ramp_up_time: Optional[int] = None,
+        duration: Optional[int] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class PassFailCriteria(_Model):
+    """Pass fail criteria for a test.
+
+    :ivar pass_fail_metrics: Map of id and pass fail metrics { id : pass fail metrics }.
+    :vartype pass_fail_metrics: dict[str, ~microsoft.loadtestservice.models.PassFailMetric]
+    :ivar pass_fail_server_metrics: Map of id and pass fail server metrics { id : pass fail
+     metrics }.
+    :vartype pass_fail_server_metrics: dict[str,
+     ~microsoft.loadtestservice.models.PassFailServerMetric]
+    """
+
+    pass_fail_metrics: Optional[Dict[str, "_models.PassFailMetric"]] = rest_field(
+        name="passFailMetrics", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Map of id and pass fail metrics { id : pass fail metrics }."""
+    pass_fail_server_metrics: Optional[Dict[str, "_models.PassFailServerMetric"]] = rest_field(
+        name="passFailServerMetrics", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Map of id and pass fail server metrics { id : pass fail metrics }."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        pass_fail_metrics: Optional[Dict[str, "_models.PassFailMetric"]] = None,
+        pass_fail_server_metrics: Optional[Dict[str, "_models.PassFailServerMetric"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
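A sketch of a URL quick test built from OptionalLoadTestConfiguration; the endpoint and numbers are illustrative:

from microsoft.loadtestservice import models as lt_models

quick_test = lt_models.LoadTestConfiguration(
    quick_start_test=True,
    optional_load_test_config=lt_models.OptionalLoadTestConfiguration(
        endpoint_url="https://contoso-app.azurewebsites.net/login",
        virtual_users=50,         # concurrent virtual users
        ramp_up_time=60,          # seconds
        duration=300,             # seconds
        requests_per_second=100,  # target throughput; best effort only
    ),
)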
+
+
+class PassFailMetric(_Model):
+    """Pass fail metric.
+
+    :ivar client_metric: The client metric on which the criteria should be applied. Known values
+     are: "response_time_ms", "latency", "error", "requests", and "requests_per_sec".
+    :vartype client_metric: str or ~microsoft.loadtestservice.models.PFMetrics
+    :ivar aggregate: The aggregation function to be applied on the client metric. Allowed functions
+
+     * ‘percentage’ - for error metric, ‘avg’, percentiles like ‘p50’, ‘p90’, and so on, ‘min’,
+       ‘max’ - for response_time_ms and latency metric, ‘avg’ - for requests_per_sec,
+       ‘count’ - for requests. Known values are: "count", "percentage", "avg", "p50", "p75", "p90",
+       "p95", "p96", "p97", "p98", "p99", "p99.9", "p99.99", "min", and "max".
+    :vartype aggregate: str or ~microsoft.loadtestservice.models.PassFailAggregationFunction
+    :ivar condition: The comparison operator. Supported operators: ‘>’, ‘<’.
+    :vartype condition: str
+    :ivar request_name: Request name for which the Pass fail criteria has to be applied.
+    :vartype request_name: str
+    :ivar value: The value to compare with the client metric. Allowed values - for ‘error’:
+     [0.0, 100.0] (unit: %); for response_time_ms and latency: any integer value (unit: ms).
+    :vartype value: float
+    :ivar action: Action taken after the threshold is met. Default is ‘continue’. Known values are:
+     "continue" and "stop".
+    :vartype action: str or ~microsoft.loadtestservice.models.PassFailAction
+    :ivar actual_value: The actual value of the client metric for the test run.
+    :vartype actual_value: float
+    :ivar result: Outcome of the test run. Known values are: "passed", "undetermined", and
+     "failed".
+    :vartype result: str or ~microsoft.loadtestservice.models.PassFailResult
+    """
+
+    client_metric: Optional[Union[str, "_models.PFMetrics"]] = rest_field(
+        name="clientMetric", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The client metric on which the criteria should be applied. Known values are:
+     \"response_time_ms\", \"latency\", \"error\", \"requests\", and \"requests_per_sec\"."""
+    aggregate: Optional[Union[str, "_models.PassFailAggregationFunction"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The aggregation function to be applied on the client metric. Allowed functions
+
+     * ‘percentage’ - for error metric, ‘avg’, percentiles like ‘p50’, ‘p90’, and so on, ‘min’,
+       ‘max’ - for response_time_ms and latency metric, ‘avg’ - for requests_per_sec,
+       ‘count’ - for requests. Known values are: \"count\", \"percentage\", \"avg\", \"p50\", \"p75\",
+       \"p90\", \"p95\", \"p96\", \"p97\", \"p98\", \"p99\", \"p99.9\", \"p99.99\", \"min\", and
+       \"max\"."""
+    condition: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The comparison operator. Supported operators: ‘>’, ‘<’."""
+    request_name: Optional[str] = rest_field(
+        name="requestName", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Request name for which the Pass fail criteria has to be applied."""
+    value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The value to compare with the client metric. Allowed values - for ‘error’: [0.0, 100.0]
+     (unit: %); for response_time_ms and latency: any integer value (unit: ms)."""
+    action: Optional[Union[str, "_models.PassFailAction"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Action taken after the threshold is met. Default is ‘continue’. Known values are: \"continue\"
+     and \"stop\"."""
+    actual_value: Optional[float] = rest_field(name="actualValue", visibility=["read"])
+    """The actual value of the client metric for the test run."""
+    result: Optional[Union[str, "_models.PassFailResult"]] = rest_field(visibility=["read"])
+    """Outcome of the test run.
Known values are: \"passed\", \"undetermined\", and \"failed\".""" + + @overload + def __init__( + self, + *, + client_metric: Optional[Union[str, "_models.PFMetrics"]] = None, + aggregate: Optional[Union[str, "_models.PassFailAggregationFunction"]] = None, + condition: Optional[str] = None, + request_name: Optional[str] = None, + value: Optional[float] = None, + action: Optional[Union[str, "_models.PassFailAction"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PassFailServerMetric(_Model): + """Pass fail server metric. + + :ivar resource_id: The resource id of the resource emitting the metric. Required. + :vartype resource_id: str + :ivar metric_namespace: The server metric namespace. Required. + :vartype metric_namespace: str + :ivar metric_name: The server metric name. Required. + :vartype metric_name: str + :ivar aggregation: Aggregation Type. Required. + :vartype aggregation: str + :ivar condition: The comparison operator. Supported types ‘>’, ‘<’. Required. + :vartype condition: str + :ivar value: The value to compare with the server metric. Required. + :vartype value: float + :ivar action: Action taken after the threshold is met. Default is ‘continue’. Known values are: + "continue" and "stop". + :vartype action: str or ~microsoft.loadtestservice.models.PassFailAction + :ivar actual_value: The actual value of the server metric. + :vartype actual_value: float + :ivar result: Outcome of the test run. Known values are: "passed", "undetermined", and + "failed". + :vartype result: str or ~microsoft.loadtestservice.models.PassFailResult + """ + + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """The resource id of the resource emitting the metric. Required.""" + metric_namespace: str = rest_field( + name="metricNamespace", visibility=["read", "create", "update", "delete", "query"] + ) + """The server metric namespace. Required.""" + metric_name: str = rest_field(name="metricName", visibility=["read", "create", "update", "delete", "query"]) + """The server metric name. Required.""" + aggregation: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Aggregation Type. Required.""" + condition: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The comparison operator. Supported types ‘>’, ‘<’. Required.""" + value: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value to compare with the server metric. Required.""" + action: Optional[Union[str, "_models.PassFailAction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Action taken after the threshold is met. Default is ‘continue’. Known values are: \"continue\" + and \"stop\".""" + actual_value: Optional[float] = rest_field(name="actualValue", visibility=["read"]) + """The actual value of the server metric.""" + result: Optional[Union[str, "_models.PassFailResult"]] = rest_field(visibility=["read"]) + """Outcome of the test run. 
Known values are: \"passed\", \"undetermined\", and \"failed\".""" + + @overload + def __init__( + self, + *, + resource_id: str, + metric_namespace: str, + metric_name: str, + aggregation: str, + condition: str, + value: float, + action: Optional[Union[str, "_models.PassFailAction"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RegionalConfiguration(_Model): + """Region distribution configuration for the load test. + + :ivar engine_instances: The number of engine instances to execute load test in specified + region. Supported values are in range of 1-400. Required. + :vartype engine_instances: int + :ivar region: Azure region name. + The region name should of format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as "eastus". + The region name must match one of the strings in the "Name" column returned from running the + "az account list-locations -o table" Azure CLI command. Required. + :vartype region: str + """ + + engine_instances: int = rest_field( + name="engineInstances", visibility=["read", "create", "update", "delete", "query"] + ) + """ The number of engine instances to execute load test in specified region. Supported values are + in range of 1-400. Required.""" + region: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Azure region name. + The region name should of format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as \"eastus\". + The region name must match one of the strings in the \"Name\" column returned from running the + \"az account list-locations -o table\" Azure CLI command. Required.""" + + @overload + def __init__( + self, + *, + engine_instances: int, + region: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResourceMetric(_Model): + """Associated metric definition for particular metrics of the azure resource ( + Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_). + + :ivar id: Unique name for metric. + :vartype id: str + :ivar resource_id: Azure resource id. Required. + :vartype resource_id: str + :ivar metric_namespace: Metric name space. Required. + :vartype metric_namespace: str + :ivar display_description: Metric description. + :vartype display_description: str + :ivar name: The invariant value of metric name. Required. + :vartype name: str + :ivar aggregation: Metric aggregation. Required. + :vartype aggregation: str + :ivar unit: Metric unit. + :vartype unit: str + :ivar resource_type: Azure resource type. Required. + :vartype resource_type: str + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Unique name for metric.""" + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource id. Required.""" + metric_namespace: str = rest_field( + name="metricNamespace", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric name space. 
Required.""" + display_description: Optional[str] = rest_field( + name="displayDescription", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric description.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The invariant value of metric name. Required.""" + aggregation: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric aggregation. Required.""" + unit: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric unit.""" + resource_type: str = rest_field(name="resourceType", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource type. Required.""" + + @overload + def __init__( + self, + *, + resource_id: str, + metric_namespace: str, + name: str, + aggregation: str, + resource_type: str, + display_description: Optional[str] = None, + unit: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Secret(_Model): + """Secret. + + :ivar value: The value of the secret for the respective type. + :vartype value: str + :ivar type: Type of secret. Known values are: "AKV_SECRET_URI" and "SECRET_VALUE". + :vartype type: str or ~microsoft.loadtestservice.models.SecretType + """ + + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the secret for the respective type.""" + type: Optional[Union[str, "_models.SecretType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of secret. Known values are: \"AKV_SECRET_URI\" and \"SECRET_VALUE\".""" + + @overload + def __init__( + self, + *, + value: Optional[str] = None, + type: Optional[Union[str, "_models.SecretType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Test(_Model): + """Load test model. + + :ivar pass_fail_criteria: Pass fail criteria for a test. + :vartype pass_fail_criteria: ~microsoft.loadtestservice.models.PassFailCriteria + :ivar auto_stop_criteria: Auto stop criteria for a test. This will automatically stop a load + test if the error percentage is high for a certain time window. + :vartype auto_stop_criteria: ~microsoft.loadtestservice.models.AutoStopCriteria + :ivar secrets: Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. If the secret is stored + elsewhere, the secret value should be provided directly and the type should be + SECRET_VALUE. + :vartype secrets: dict[str, ~microsoft.loadtestservice.models.Secret] + :ivar certificate: Certificates metadata. + :vartype certificate: ~microsoft.loadtestservice.models.CertificateMetadata + :ivar environment_variables: Environment variables which are defined as a set of + pairs. + :vartype environment_variables: dict[str, str] + :ivar load_test_configuration: The load test configuration. 
+    :vartype load_test_configuration: ~microsoft.loadtestservice.models.LoadTestConfiguration
+    :ivar baseline_test_run_id: Id of the test run to be marked as baseline to view trends of
+     client-side metrics from recent test runs.
+    :vartype baseline_test_run_id: str
+    :ivar input_artifacts: The input artifacts for the test.
+    :vartype input_artifacts: ~microsoft.loadtestservice.models.TestInputArtifacts
+    :ivar test_id: Unique test identifier for the load test, must contain only lower-case
+     alphabetic, numeric, underscore or hyphen characters. Required.
+    :vartype test_id: str
+    :ivar description: The test description.
+    :vartype description: str
+    :ivar display_name: Display name of a test.
+    :vartype display_name: str
+    :ivar subnet_id: Subnet ID on which the load test instances should run.
+    :vartype subnet_id: str
+    :ivar kind: Kind of test. Known values are: "URL", "JMX", and "Locust".
+    :vartype kind: str or ~microsoft.loadtestservice.models.TestKind
+    :ivar public_ip_disabled: Inject load test engines without deploying public IP for outbound
+     access.
+    :vartype public_ip_disabled: bool
+    :ivar keyvault_reference_identity_type: Type of the managed identity referencing the Key vault.
+    :vartype keyvault_reference_identity_type: str
+    :ivar keyvault_reference_identity_id: Resource Id of the managed identity referencing the Key
+     vault.
+    :vartype keyvault_reference_identity_id: str
+    :ivar metrics_reference_identity_type: Type of the managed identity referencing the metrics.
+     Known values are: "SystemAssigned" and "UserAssigned".
+    :vartype metrics_reference_identity_type: str or
+     ~microsoft.loadtestservice.models.ManagedIdentityType
+    :ivar metrics_reference_identity_id: Resource Id of the managed identity referencing the
+     metrics.
+    :vartype metrics_reference_identity_id: str
+    :ivar engine_built_in_identity_type: Type of the managed identity built into the load test
+     engines. Known values are: "SystemAssigned" and "UserAssigned".
+    :vartype engine_built_in_identity_type: str or
+     ~microsoft.loadtestservice.models.ManagedIdentityType
+    :ivar engine_built_in_identity_ids: Resource Ids of the managed identity built into the load
+     test engines. Required if engineBuiltInIdentityType is UserAssigned.
+    :vartype engine_built_in_identity_ids: list[str]
+    :ivar created_date_time: The creation datetime(RFC 3339 literal format).
+    :vartype created_date_time: ~datetime.datetime
+    :ivar created_by: The user that created.
+    :vartype created_by: str
+    :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format).
+    :vartype last_modified_date_time: ~datetime.datetime
+    :ivar last_modified_by: The user that last modified.
+    :vartype last_modified_by: str
+    """
+
+    pass_fail_criteria: Optional["_models.PassFailCriteria"] = rest_field(
+        name="passFailCriteria", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Pass fail criteria for a test."""
+    auto_stop_criteria: Optional["_models.AutoStopCriteria"] = rest_field(
+        name="autoStopCriteria", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Auto stop criteria for a test. This will automatically stop a load test if the error percentage
+     is high for a certain time window."""
+    secrets: Optional[Dict[str, "_models.Secret"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Secrets can be stored in an Azure Key Vault or any other secret store.
+     If the secret is stored in an Azure Key Vault, the value should be the secret
+     identifier and the type should be AKV_SECRET_URI. If the secret is stored
+     elsewhere, the secret value should be provided directly and the type should be
+     SECRET_VALUE."""
+    certificate: Optional["_models.CertificateMetadata"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Certificates metadata."""
+    environment_variables: Optional[Dict[str, str]] = rest_field(
+        name="environmentVariables", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Environment variables which are defined as a set of name-value pairs."""
+    load_test_configuration: Optional["_models.LoadTestConfiguration"] = rest_field(
+        name="loadTestConfiguration", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The load test configuration."""
+    baseline_test_run_id: Optional[str] = rest_field(
+        name="baselineTestRunId", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Id of the test run to be marked as baseline to view trends of client-side metrics from recent
+     test runs."""
+    input_artifacts: Optional["_models.TestInputArtifacts"] = rest_field(name="inputArtifacts", visibility=["read"])
+    """The input artifacts for the test."""
+    test_id: str = rest_field(name="testId", visibility=["read"])
+    """Unique test identifier for the load test, must contain only lower-case alphabetic, numeric,
+     underscore or hyphen characters. Required."""
+    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The test description."""
+    display_name: Optional[str] = rest_field(
+        name="displayName", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Display name of a test."""
+    subnet_id: Optional[str] = rest_field(name="subnetId", visibility=["read", "create", "update", "delete", "query"])
+    """Subnet ID on which the load test instances should run."""
+    kind: Optional[Union[str, "_models.TestKind"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Kind of test. Known values are: \"URL\", \"JMX\", and \"Locust\"."""
+    public_ip_disabled: Optional[bool] = rest_field(
+        name="publicIPDisabled", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Inject load test engines without deploying public IP for outbound access."""
+    keyvault_reference_identity_type: Optional[str] = rest_field(
+        name="keyvaultReferenceIdentityType", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Type of the managed identity referencing the Key vault."""
+    keyvault_reference_identity_id: Optional[str] = rest_field(
+        name="keyvaultReferenceIdentityId", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Resource Id of the managed identity referencing the Key vault."""
+    metrics_reference_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = rest_field(
+        name="metricsReferenceIdentityType", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Type of the managed identity referencing the metrics.
Known values are: \"SystemAssigned\" and + \"UserAssigned\".""" + metrics_reference_identity_id: Optional[str] = rest_field( + name="metricsReferenceIdentityId", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Id of the managed identity referencing the metrics.""" + engine_built_in_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = rest_field( + name="engineBuiltInIdentityType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the managed identity built in load test engines. Known values are: \"SystemAssigned\" + and \"UserAssigned\".""" + engine_built_in_identity_ids: Optional[List[str]] = rest_field( + name="engineBuiltInIdentityIds", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Ids of the managed identity built in to load test engines. Required if + engineBuiltInIdentityType is UserAssigned.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + pass_fail_criteria: Optional["_models.PassFailCriteria"] = None, + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = None, + secrets: Optional[Dict[str, "_models.Secret"]] = None, + certificate: Optional["_models.CertificateMetadata"] = None, + environment_variables: Optional[Dict[str, str]] = None, + load_test_configuration: Optional["_models.LoadTestConfiguration"] = None, + baseline_test_run_id: Optional[str] = None, + description: Optional[str] = None, + display_name: Optional[str] = None, + subnet_id: Optional[str] = None, + kind: Optional[Union[str, "_models.TestKind"]] = None, + public_ip_disabled: Optional[bool] = None, + keyvault_reference_identity_type: Optional[str] = None, + keyvault_reference_identity_id: Optional[str] = None, + metrics_reference_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = None, + metrics_reference_identity_id: Optional[str] = None, + engine_built_in_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = None, + engine_built_in_identity_ids: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestAppComponents(_Model): + """Test app components. + + :ivar components: Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. Required. + :vartype components: dict[str, ~microsoft.loadtestservice.models.AppComponent] + :ivar test_id: Test identifier. + :vartype test_id: str + :ivar created_date_time: The creation datetime(RFC 3339 literal format). 
+
+
+class TestAppComponents(_Model):
+    """Test app components.
+
+    :ivar components: Azure resource collection { resource id (fully qualified resource Id e.g.
+     subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName})
+     : resource object }. Required.
+    :vartype components: dict[str, ~microsoft.loadtestservice.models.AppComponent]
+    :ivar test_id: Test identifier.
+    :vartype test_id: str
+    :ivar created_date_time: The creation datetime(RFC 3339 literal format).
+    :vartype created_date_time: ~datetime.datetime
+    :ivar created_by: The user that created.
+    :vartype created_by: str
+    :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format).
+    :vartype last_modified_date_time: ~datetime.datetime
+    :ivar last_modified_by: The user that last modified.
+    :vartype last_modified_by: str
+    """
+
+    components: Dict[str, "_models.AppComponent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Azure resource collection { resource id (fully qualified resource Id e.g.
+     subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName})
+     : resource object }. Required."""
+    test_id: Optional[str] = rest_field(name="testId", visibility=["read"])
+    """Test identifier."""
+    created_date_time: Optional[datetime.datetime] = rest_field(
+        name="createdDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The creation datetime(RFC 3339 literal format)."""
+    created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"])
+    """The user that created."""
+    last_modified_date_time: Optional[datetime.datetime] = rest_field(
+        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The last Modified datetime(RFC 3339 literal format)."""
+    last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"])
+    """The user that last modified."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        components: Dict[str, "_models.AppComponent"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestFileInfo(_Model):
+    """Test file info.
+
+    :ivar file_name: Name of the file. Required.
+    :vartype file_name: str
+    :ivar url: File URL.
+    :vartype url: str
+    :ivar file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES",
+     "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT".
+    :vartype file_type: str or ~microsoft.loadtestservice.models.FileType
+    :ivar expire_date_time: Expiry time of the file (RFC 3339 literal format).
+    :vartype expire_date_time: ~datetime.datetime
+    :ivar validation_status: Validation status of the file. Known values are: "NOT_VALIDATED",
+     "VALIDATION_SUCCESS", "VALIDATION_FAILURE", "VALIDATION_INITIATED", and
+     "VALIDATION_NOT_REQUIRED".
+    :vartype validation_status: str or ~microsoft.loadtestservice.models.FileValidationStatus
+    :ivar validation_failure_details: Validation failure error details.
+    :vartype validation_failure_details: str
+    """
+
+    file_name: str = rest_field(name="fileName", visibility=["read", "create", "update", "delete", "query"])
+    """Name of the file. Required."""
+    url: Optional[str] = rest_field(visibility=["read"])
+    """File URL."""
+    file_type: Optional[Union[str, "_models.FileType"]] = rest_field(name="fileType", visibility=["read"])
+    """File type.
Known values are: \"JMX_FILE\", \"USER_PROPERTIES\", \"ADDITIONAL_ARTIFACTS\", + \"ZIPPED_ARTIFACTS\", \"URL_TEST_CONFIG\", and \"TEST_SCRIPT\".""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read"], format="rfc3339" + ) + """Expiry time of the file (RFC 3339 literal format).""" + validation_status: Optional[Union[str, "_models.FileValidationStatus"]] = rest_field( + name="validationStatus", visibility=["read"] + ) + """Validation status of the file. Known values are: \"NOT_VALIDATED\", \"VALIDATION_SUCCESS\", + \"VALIDATION_FAILURE\", \"VALIDATION_INITIATED\", and \"VALIDATION_NOT_REQUIRED\".""" + validation_failure_details: Optional[str] = rest_field(name="validationFailureDetails", visibility=["read"]) + """Validation failure error details.""" + + @overload + def __init__( + self, + *, + file_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestInputArtifacts(_Model): + """The input artifacts for the test. + + :ivar config_file_info: The load test YAML file that contains the the test configuration. + :vartype config_file_info: ~microsoft.loadtestservice.models.TestFileInfo + :ivar test_script_file_info: The test script file for the test run. + :vartype test_script_file_info: ~microsoft.loadtestservice.models.TestFileInfo + :ivar user_prop_file_info: The user properties file. + :vartype user_prop_file_info: ~microsoft.loadtestservice.models.TestFileInfo + :ivar input_artifacts_zip_file_info: The zip file with all input artifacts. + :vartype input_artifacts_zip_file_info: ~microsoft.loadtestservice.models.TestFileInfo + :ivar url_test_config_file_info: The config json file for url based test. + :vartype url_test_config_file_info: ~microsoft.loadtestservice.models.TestFileInfo + :ivar additional_file_info: Additional supported files for the test run. 
+    :vartype additional_file_info: list[~microsoft.loadtestservice.models.TestFileInfo]
+    """
+
+    config_file_info: Optional["_models.TestFileInfo"] = rest_field(
+        name="configFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The load test YAML file that contains the test configuration."""
+    test_script_file_info: Optional["_models.TestFileInfo"] = rest_field(
+        name="testScriptFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The test script file for the test run."""
+    user_prop_file_info: Optional["_models.TestFileInfo"] = rest_field(
+        name="userPropFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The user properties file."""
+    input_artifacts_zip_file_info: Optional["_models.TestFileInfo"] = rest_field(
+        name="inputArtifactsZipFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The zip file with all input artifacts."""
+    url_test_config_file_info: Optional["_models.TestFileInfo"] = rest_field(
+        name="urlTestConfigFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The config JSON file for a URL-based test."""
+    additional_file_info: Optional[List["_models.TestFileInfo"]] = rest_field(
+        name="additionalFileInfo", visibility=["read"]
+    )
+    """Additional supported files for the test run."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        config_file_info: Optional["_models.TestFileInfo"] = None,
+        test_script_file_info: Optional["_models.TestFileInfo"] = None,
+        user_prop_file_info: Optional["_models.TestFileInfo"] = None,
+        input_artifacts_zip_file_info: Optional["_models.TestFileInfo"] = None,
+        url_test_config_file_info: Optional["_models.TestFileInfo"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestProfile(_Model):
+    """Test Profile Model.
+
+    :ivar test_profile_id: Unique identifier for the test profile, must contain only lower-case
+     alphabetic, numeric, underscore or hyphen characters. Required.
+    :vartype test_profile_id: str
+    :ivar display_name: Display name of the test profile.
+    :vartype display_name: str
+    :ivar description: Description for the test profile.
+    :vartype description: str
+    :ivar test_id: Associated test ID for the test profile. This property is required for creating
+     a Test Profile and it's not allowed to be updated.
+    :vartype test_id: str
+    :ivar target_resource_id: Target resource ID on which the test profile is created. This
+     property is required for creating a Test Profile and it's not allowed to be updated.
+    :vartype target_resource_id: str
+    :ivar target_resource_configurations: Configurations of the target resource on which testing
+     would be done.
+    :vartype target_resource_configurations:
+     ~microsoft.loadtestservice.models.TargetResourceConfigurations
+    :ivar created_date_time: The creation datetime(RFC 3339 literal format).
+    :vartype created_date_time: ~datetime.datetime
+    :ivar created_by: The user that created.
+    :vartype created_by: str
+    :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format).
+    :vartype last_modified_date_time: ~datetime.datetime
+    :ivar last_modified_by: The user that last modified.
+    :vartype last_modified_by: str
+    """
+
+    test_profile_id: str = rest_field(name="testProfileId", visibility=["read"])
+    """Unique identifier for the test profile, must contain only lower-case alphabetic, numeric,
+     underscore or hyphen characters. Required."""
+    display_name: Optional[str] = rest_field(
+        name="displayName", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Display name of the test profile."""
+    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Description for the test profile."""
+    test_id: Optional[str] = rest_field(name="testId", visibility=["read", "create"])
+    """Associated test ID for the test profile. This property is required for creating a Test Profile
+     and it's not allowed to be updated."""
+    target_resource_id: Optional[str] = rest_field(name="targetResourceId", visibility=["read", "create"])
+    """Target resource ID on which the test profile is created. This property is required for creating
+     a Test Profile and it's not allowed to be updated."""
+    target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = rest_field(
+        name="targetResourceConfigurations", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Configurations of the target resource on which testing would be done."""
+    created_date_time: Optional[datetime.datetime] = rest_field(
+        name="createdDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The creation datetime(RFC 3339 literal format)."""
+    created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"])
+    """The user that created."""
+    last_modified_date_time: Optional[datetime.datetime] = rest_field(
+        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The last Modified datetime(RFC 3339 literal format)."""
+    last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"])
+    """The user that last modified."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        display_name: Optional[str] = None,
+        description: Optional[str] = None,
+        test_id: Optional[str] = None,
+        target_resource_id: Optional[str] = None,
+        target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
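A sketch of a test profile targeting a Flex Consumption Function App via the discriminated FunctionFlexConsumptionTargetResourceConfigurations model defined earlier. The per-configuration wire fields of FunctionFlexConsumptionResourceConfiguration are not shown in this patch, so the inner mapping is an assumption:

from microsoft.loadtestservice import models as lt_models

profile = lt_models.TestProfile(
    display_name="flex-consumption-profile",
    test_id="my-load-test",
    target_resource_id=(
        "/subscriptions/{subId}/resourceGroups/{rg}"
        "/providers/Microsoft.Web/sites/{functionAppName}"
    ),
    target_resource_configurations=lt_models.FunctionFlexConsumptionTargetResourceConfigurations(
        configurations={
            # FunctionFlexConsumptionResourceConfiguration fields are assumed here.
            "config1": lt_models.FunctionFlexConsumptionResourceConfiguration(
                {"instanceMemoryMB": 2048}
            )
        }
    ),
)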
Known values are: "ACCEPTED", "NOTSTARTED", + "EXECUTING", "DONE", "CANCELLING", "CANCELLED", and "FAILED". + :vartype status: str or ~microsoft.loadtestservice.models.TestProfileRunStatus + :ivar error_details: Error details if there is any failure in test profile run. These errors + are specific to the Test Profile Run. + :vartype error_details: list[~microsoft.loadtestservice.models.ErrorDetails] + :ivar start_date_time: The test profile run start DateTime(RFC 3339 literal format). + :vartype start_date_time: ~datetime.datetime + :ivar end_date_time: The test profile run end DateTime(RFC 3339 literal format). + :vartype end_date_time: ~datetime.datetime + :ivar duration_in_seconds: Test profile run duration in seconds. + :vartype duration_in_seconds: int + :ivar test_run_details: Details of the test runs ran as part of the test profile run. + Key is the testRunId of the corresponding testRun. + :vartype test_run_details: dict[str, ~microsoft.loadtestservice.models.TestRunDetail] + :ivar recommendations: Recommendations provided based on a successful test profile run. + :vartype recommendations: list[~microsoft.loadtestservice.models.TestProfileRunRecommendation] + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_profile_run_id: str = rest_field(name="testProfileRunId", visibility=["read"]) + """Unique identifier for the test profile run, must contain only lower-case alphabetic, numeric, + underscore or hyphen characters. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display name for the test profile run.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The test profile run description.""" + test_profile_id: Optional[str] = rest_field(name="testProfileId", visibility=["read", "create"]) + """Associated test profile ID for the test profile run. This is required to create a test profile + run and can't be updated.""" + target_resource_id: Optional[str] = rest_field(name="targetResourceId", visibility=["read"]) + """Target resource ID on which the test profile run is created.""" + target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = rest_field( + name="targetResourceConfigurations", visibility=["read"] + ) + """Configurations of the target resource on which the test profile ran.""" + status: Optional[Union[str, "_models.TestProfileRunStatus"]] = rest_field(visibility=["read"]) + """The test profile run status. Known values are: \"ACCEPTED\", \"NOTSTARTED\", \"EXECUTING\", + \"DONE\", \"CANCELLING\", \"CANCELLED\", and \"FAILED\".""" + error_details: Optional[List["_models.ErrorDetails"]] = rest_field(name="errorDetails", visibility=["read"]) + """Error details if there is any failure in test profile run. 
+     These errors are specific to the Test Profile Run."""
+    start_date_time: Optional[datetime.datetime] = rest_field(
+        name="startDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The test profile run start DateTime(RFC 3339 literal format)."""
+    end_date_time: Optional[datetime.datetime] = rest_field(name="endDateTime", visibility=["read"], format="rfc3339")
+    """The test profile run end DateTime(RFC 3339 literal format)."""
+    duration_in_seconds: Optional[int] = rest_field(name="durationInSeconds", visibility=["read"])
+    """Test profile run duration in seconds."""
+    test_run_details: Optional[Dict[str, "_models.TestRunDetail"]] = rest_field(
+        name="testRunDetails", visibility=["read"]
+    )
+    """Details of the test runs run as part of the test profile run.
+     Key is the testRunId of the corresponding testRun."""
+    recommendations: Optional[List["_models.TestProfileRunRecommendation"]] = rest_field(visibility=["read"])
+    """Recommendations provided based on a successful test profile run."""
+    created_date_time: Optional[datetime.datetime] = rest_field(
+        name="createdDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The creation datetime(RFC 3339 literal format)."""
+    created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"])
+    """The user that created."""
+    last_modified_date_time: Optional[datetime.datetime] = rest_field(
+        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The last Modified datetime(RFC 3339 literal format)."""
+    last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"])
+    """The user that last modified."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        display_name: Optional[str] = None,
+        description: Optional[str] = None,
+        test_profile_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestProfileRunRecommendation(_Model):
+    """A recommendation object that provides a list of configurations that optimize its category.
+
+    :ivar category: Category of the recommendation. Required. Known values are:
+     "ThroughputOptimized" and "CostOptimized".
+    :vartype category: str or ~microsoft.loadtestservice.models.RecommendationCategory
+    :ivar configurations: List of configuration IDs for which the recommendation is applicable.
+     These are a subset of the provided target resource configurations.
+    :vartype configurations: list[str]
+    """
+
+    category: Union[str, "_models.RecommendationCategory"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Category of the recommendation. Required. Known values are: \"ThroughputOptimized\" and
+     \"CostOptimized\"."""
+    configurations: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """List of configuration IDs for which the recommendation is applicable. These are a subset of
+     the provided target resource configurations."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        category: Union[str, "_models.RecommendationCategory"],
+        configurations: Optional[List[str]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestRun(_Model):
+    """Load test run model.
+
+    :ivar test_run_id: Unique test run identifier for the load test run, must contain only
+     lower-case alphabetic, numeric, underscore or hyphen characters. Required.
+    :vartype test_run_id: str
+    :ivar pass_fail_criteria: Pass fail criteria for a test.
+    :vartype pass_fail_criteria: ~microsoft.loadtestservice.models.PassFailCriteria
+    :ivar auto_stop_criteria: Auto stop criteria for a test. This will automatically stop a load
+     test if the error percentage is high for a certain time window.
+    :vartype auto_stop_criteria: ~microsoft.loadtestservice.models.AutoStopCriteria
+    :ivar secrets: Secrets can be stored in an Azure Key Vault or any other secret store. If the
+     secret is stored in an Azure Key Vault, the value should be the secret
+     identifier and the type should be AKV_SECRET_URI. If the secret is stored
+     elsewhere, the secret value should be provided directly and the type should be
+     SECRET_VALUE.
+    :vartype secrets: dict[str, ~microsoft.loadtestservice.models.Secret]
+    :ivar certificate: Certificates metadata.
+    :vartype certificate: ~microsoft.loadtestservice.models.CertificateMetadata
+    :ivar environment_variables: Environment variables which are defined as a set of name-value
+     pairs.
+    :vartype environment_variables: dict[str, str]
+    :ivar error_details: Error details if there is any failure in the load test run.
+    :vartype error_details: list[~microsoft.loadtestservice.models.ErrorDetails]
+    :ivar test_run_statistics: Test run statistics. Key is the sampler name and value is the set of
+     statistics for performance metrics like response time, throughput, etc. from the load test run.
+     The sampler name is the same as the name mentioned in the test script.
+     Sampler name "Total" represents the aggregated statistics of all the samplers.
+    :vartype test_run_statistics: dict[str, ~microsoft.loadtestservice.models.TestRunStatistics]
+    :ivar regional_statistics: Regional statistics. Key is the Azure region name and value is the
+     test run statistics.
+     The region name should be of the format accepted by ARM, and should be a region supported by
+     Azure Load Testing. For example, East US should be passed as "eastus".
+     The region name must match one of the strings in the "Name" column returned from running the
+     "az account list-locations -o table" Azure CLI command.
+    :vartype regional_statistics: dict[str, ~microsoft.loadtestservice.models.TestRunStatistics]
+    :ivar load_test_configuration: The load test configuration.
+    :vartype load_test_configuration: ~microsoft.loadtestservice.models.LoadTestConfiguration
+    :ivar test_artifacts: Collection of test run artifacts.
+    :vartype test_artifacts: ~microsoft.loadtestservice.models.TestRunArtifacts
+    :ivar test_result: Test result for pass/fail criteria used during the test run. Known values
+     are: "PASSED", "NOT_APPLICABLE", and "FAILED".
+    :vartype test_result: str or ~microsoft.loadtestservice.models.PassFailTestResult
+    :ivar virtual_users: Number of virtual users for which the test has been run.
+    :vartype virtual_users: int
+    :ivar display_name: Display name of a testRun.
+    :vartype display_name: str
+    :ivar test_id: Associated test Id.
+    :vartype test_id: str
+    :ivar description: The test run description.
+    :vartype description: str
+    :ivar status: The test run status.
Known values are: "ACCEPTED", "NOTSTARTED", "PROVISIONING", + "PROVISIONED", "CONFIGURING", "CONFIGURED", "EXECUTING", "EXECUTED", "DEPROVISIONING", + "DEPROVISIONED", "DONE", "CANCELLING", "CANCELLED", "FAILED", "VALIDATION_SUCCESS", and + "VALIDATION_FAILURE". + :vartype status: str or ~microsoft.loadtestservice.models.TestRunStatus + :ivar start_date_time: The test run start DateTime(RFC 3339 literal format). + :vartype start_date_time: ~datetime.datetime + :ivar end_date_time: The test run end DateTime(RFC 3339 literal format). + :vartype end_date_time: ~datetime.datetime + :ivar executed_date_time: Test run initiated time. + :vartype executed_date_time: ~datetime.datetime + :ivar portal_url: Portal url. + :vartype portal_url: str + :ivar duration: Test run duration in milliseconds. + :vartype duration: int + :ivar virtual_user_hours: Virtual user hours consumed by the test run. + :vartype virtual_user_hours: float + :ivar subnet_id: Subnet ID on which the load test instances should run. + :vartype subnet_id: str + :ivar kind: Type of test. Known values are: "URL", "JMX", and "Locust". + :vartype kind: str or ~microsoft.loadtestservice.models.TestKind + :ivar request_data_level: Request data collection level for test run. Known values are: "NONE" + and "ERRORS". + :vartype request_data_level: str or ~microsoft.loadtestservice.models.RequestDataLevel + :ivar debug_logs_enabled: Enable or disable debug level logging. True if debug logs are enabled + for the test run. False otherwise. + :vartype debug_logs_enabled: bool + :ivar public_ip_disabled: Inject load test engines without deploying public IP for outbound + access. + :vartype public_ip_disabled: bool + :ivar created_by_type: The type of the entity that created the test run. (E.x. User, + ScheduleTrigger, etc). Known values are: "User" and "ScheduledTrigger". + :vartype created_by_type: str or ~microsoft.loadtestservice.models.CreatedByType + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_run_id: str = rest_field(name="testRunId", visibility=["read"]) + """Unique test run identifier for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required.""" + pass_fail_criteria: Optional["_models.PassFailCriteria"] = rest_field( + name="passFailCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Pass fail criteria for a test.""" + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = rest_field( + name="autoStopCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Auto stop criteria for a test. This will automatically stop a load test if the error percentage + is high for a certain time window.""" + secrets: Optional[Dict[str, "_models.Secret"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. 
+    If the secret is stored elsewhere, the secret value should be provided directly and the type
+    should be SECRET_VALUE."""
+    certificate: Optional["_models.CertificateMetadata"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Certificates metadata."""
+    environment_variables: Optional[Dict[str, str]] = rest_field(
+        name="environmentVariables", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Environment variables which are defined as a set of name-value pairs."""
+    error_details: Optional[List["_models.ErrorDetails"]] = rest_field(name="errorDetails", visibility=["read"])
+    """Error details if there is any failure in load test run."""
+    test_run_statistics: Optional[Dict[str, "_models.TestRunStatistics"]] = rest_field(
+        name="testRunStatistics", visibility=["read"]
+    )
+    """Test run statistics. Key is the sampler name and value is the set of statistics for performance
+    metrics like response time, throughput, etc. from the load test run.
+    The sampler name is the same as the name mentioned in the test script.
+    Sampler name \"Total\" represents the aggregated statistics of all the samplers."""
+    regional_statistics: Optional[Dict[str, "_models.TestRunStatistics"]] = rest_field(
+        name="regionalStatistics", visibility=["read"]
+    )
+    """Regional statistics. Key is the Azure region name and value is the test run statistics.
+    The region name should be of a format accepted by ARM, and should be a region supported by
+    Azure Load Testing. For example, East US should be passed as \"eastus\".
+    The region name must match one of the strings in the \"Name\" column returned from running the
+    \"az account list-locations -o table\" Azure CLI command."""
+    load_test_configuration: Optional["_models.LoadTestConfiguration"] = rest_field(
+        name="loadTestConfiguration", visibility=["read"]
+    )
+    """The load test configuration."""
+    test_artifacts: Optional["_models.TestRunArtifacts"] = rest_field(name="testArtifacts", visibility=["read"])
+    """Collection of test run artifacts."""
+    test_result: Optional[Union[str, "_models.PassFailTestResult"]] = rest_field(name="testResult", visibility=["read"])
+    """Test result for pass/fail criteria used during the test run. Known values are: \"PASSED\",
+    \"NOT_APPLICABLE\", and \"FAILED\"."""
+    virtual_users: Optional[int] = rest_field(name="virtualUsers", visibility=["read"])
+    """Number of virtual users for which the test has been run."""
+    display_name: Optional[str] = rest_field(
+        name="displayName", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Display name of a testRun."""
+    test_id: Optional[str] = rest_field(name="testId", visibility=["read", "create", "update", "delete", "query"])
+    """Associated test Id."""
+    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The test run description."""
+    status: Optional[Union[str, "_models.TestRunStatus"]] = rest_field(visibility=["read"])
+    """The test run status.
Known values are: \"ACCEPTED\", \"NOTSTARTED\", \"PROVISIONING\", + \"PROVISIONED\", \"CONFIGURING\", \"CONFIGURED\", \"EXECUTING\", \"EXECUTED\", + \"DEPROVISIONING\", \"DEPROVISIONED\", \"DONE\", \"CANCELLING\", \"CANCELLED\", \"FAILED\", + \"VALIDATION_SUCCESS\", and \"VALIDATION_FAILURE\".""" + start_date_time: Optional[datetime.datetime] = rest_field( + name="startDateTime", visibility=["read"], format="rfc3339" + ) + """The test run start DateTime(RFC 3339 literal format).""" + end_date_time: Optional[datetime.datetime] = rest_field(name="endDateTime", visibility=["read"], format="rfc3339") + """The test run end DateTime(RFC 3339 literal format).""" + executed_date_time: Optional[datetime.datetime] = rest_field( + name="executedDateTime", visibility=["read"], format="rfc3339" + ) + """Test run initiated time.""" + portal_url: Optional[str] = rest_field(name="portalUrl", visibility=["read"]) + """Portal url.""" + duration: Optional[int] = rest_field(visibility=["read"]) + """Test run duration in milliseconds.""" + virtual_user_hours: Optional[float] = rest_field(name="virtualUserHours", visibility=["read"]) + """Virtual user hours consumed by the test run.""" + subnet_id: Optional[str] = rest_field(name="subnetId", visibility=["read"]) + """Subnet ID on which the load test instances should run.""" + kind: Optional[Union[str, "_models.TestKind"]] = rest_field(visibility=["read"]) + """Type of test. Known values are: \"URL\", \"JMX\", and \"Locust\".""" + request_data_level: Optional[Union[str, "_models.RequestDataLevel"]] = rest_field( + name="requestDataLevel", visibility=["read", "create", "update", "delete", "query"] + ) + """Request data collection level for test run. Known values are: \"NONE\" and \"ERRORS\".""" + debug_logs_enabled: Optional[bool] = rest_field( + name="debugLogsEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enable or disable debug level logging. True if debug logs are enabled for the test run. False + otherwise.""" + public_ip_disabled: Optional[bool] = rest_field(name="publicIPDisabled", visibility=["read"]) + """Inject load test engines without deploying public IP for outbound access.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the entity that created the test run. (E.x. User, ScheduleTrigger, etc). 
Known + values are: \"User\" and \"ScheduledTrigger\".""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + pass_fail_criteria: Optional["_models.PassFailCriteria"] = None, + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = None, + secrets: Optional[Dict[str, "_models.Secret"]] = None, + certificate: Optional["_models.CertificateMetadata"] = None, + environment_variables: Optional[Dict[str, str]] = None, + display_name: Optional[str] = None, + test_id: Optional[str] = None, + description: Optional[str] = None, + request_data_level: Optional[Union[str, "_models.RequestDataLevel"]] = None, + debug_logs_enabled: Optional[bool] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunAppComponents(_Model): + """Test run app component. + + :ivar components: Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. Required. + :vartype components: dict[str, ~microsoft.loadtestservice.models.AppComponent] + :ivar test_run_id: Test run identifier. + :vartype test_run_id: str + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + components: Dict[str, "_models.AppComponent"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. 
Required.""" + test_run_id: Optional[str] = rest_field(name="testRunId", visibility=["read"]) + """Test run identifier.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + components: Dict[str, "_models.AppComponent"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunArtifacts(_Model): + """Collection of test run artifacts. + + :ivar input_artifacts: The input artifacts for the test run. + :vartype input_artifacts: ~microsoft.loadtestservice.models.TestRunInputArtifacts + :ivar output_artifacts: The output artifacts for the test run. + :vartype output_artifacts: ~microsoft.loadtestservice.models.TestRunOutputArtifacts + """ + + input_artifacts: Optional["_models.TestRunInputArtifacts"] = rest_field(name="inputArtifacts", visibility=["read"]) + """The input artifacts for the test run.""" + output_artifacts: Optional["_models.TestRunOutputArtifacts"] = rest_field( + name="outputArtifacts", visibility=["read", "create", "update", "delete", "query"] + ) + """The output artifacts for the test run.""" + + @overload + def __init__( + self, + *, + output_artifacts: Optional["_models.TestRunOutputArtifacts"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunDetail(_Model): + """Details of a particular test run for a test profile run. + + :ivar status: Status of the test run. Required. Known values are: "ACCEPTED", "NOTSTARTED", + "PROVISIONING", "PROVISIONED", "CONFIGURING", "CONFIGURED", "EXECUTING", "EXECUTED", + "DEPROVISIONING", "DEPROVISIONED", "DONE", "CANCELLING", "CANCELLED", "FAILED", + "VALIDATION_SUCCESS", and "VALIDATION_FAILURE". + :vartype status: str or ~microsoft.loadtestservice.models.TestRunStatus + :ivar configuration_id: ID of the configuration on which the test ran. Required. + :vartype configuration_id: str + :ivar properties: Key value pair of extra properties associated with the test run. Required. + :vartype properties: dict[str, str] + """ + + status: Union[str, "_models.TestRunStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Status of the test run. Required. 
Known values are: \"ACCEPTED\", \"NOTSTARTED\", + \"PROVISIONING\", \"PROVISIONED\", \"CONFIGURING\", \"CONFIGURED\", \"EXECUTING\", + \"EXECUTED\", \"DEPROVISIONING\", \"DEPROVISIONED\", \"DONE\", \"CANCELLING\", \"CANCELLED\", + \"FAILED\", \"VALIDATION_SUCCESS\", and \"VALIDATION_FAILURE\".""" + configuration_id: str = rest_field( + name="configurationId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID of the configuration on which the test ran. Required.""" + properties: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Key value pair of extra properties associated with the test run. Required.""" + + @overload + def __init__( + self, + *, + status: Union[str, "_models.TestRunStatus"], + configuration_id: str, + properties: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunFileInfo(_Model): + """Test run file info. + + :ivar file_name: Name of the file. Required. + :vartype file_name: str + :ivar url: File URL. + :vartype url: str + :ivar file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". + :vartype file_type: str or ~microsoft.loadtestservice.models.FileType + :ivar expire_date_time: Expiry time of the file (RFC 3339 literal format). + :vartype expire_date_time: ~datetime.datetime + :ivar validation_status: Validation status of the file. Known values are: "NOT_VALIDATED", + "VALIDATION_SUCCESS", "VALIDATION_FAILURE", "VALIDATION_INITIATED", and + "VALIDATION_NOT_REQUIRED". + :vartype validation_status: str or ~microsoft.loadtestservice.models.FileValidationStatus + :ivar validation_failure_details: Validation failure error details. + :vartype validation_failure_details: str + """ + + file_name: str = rest_field(name="fileName", visibility=["read", "create", "update", "delete", "query"]) + """Name of the file. Required.""" + url: Optional[str] = rest_field(visibility=["read"]) + """File URL.""" + file_type: Optional[Union[str, "_models.FileType"]] = rest_field(name="fileType", visibility=["read"]) + """File type. Known values are: \"JMX_FILE\", \"USER_PROPERTIES\", \"ADDITIONAL_ARTIFACTS\", + \"ZIPPED_ARTIFACTS\", \"URL_TEST_CONFIG\", and \"TEST_SCRIPT\".""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read"], format="rfc3339" + ) + """Expiry time of the file (RFC 3339 literal format).""" + validation_status: Optional[Union[str, "_models.FileValidationStatus"]] = rest_field( + name="validationStatus", visibility=["read"] + ) + """Validation status of the file. Known values are: \"NOT_VALIDATED\", \"VALIDATION_SUCCESS\", + \"VALIDATION_FAILURE\", \"VALIDATION_INITIATED\", and \"VALIDATION_NOT_REQUIRED\".""" + validation_failure_details: Optional[str] = rest_field(name="validationFailureDetails", visibility=["read"]) + """Validation failure error details.""" + + @overload + def __init__( + self, + *, + file_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestRunInputArtifacts(_Model):
+    """The input artifacts for the test run.
+
+    :ivar config_file_info: The load test YAML file that contains the test configuration.
+    :vartype config_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar test_script_file_info: The test script file for the test run.
+    :vartype test_script_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar user_prop_file_info: The user properties file.
+    :vartype user_prop_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar input_artifacts_zip_file_info: The zip file for all input artifacts.
+    :vartype input_artifacts_zip_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar url_test_config_file_info: The config JSON file for URL-based test.
+    :vartype url_test_config_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar additional_file_info: Additional supported files for the test run.
+    :vartype additional_file_info: list[~microsoft.loadtestservice.models.TestRunFileInfo]
+    """
+
+    config_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="configFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The load test YAML file that contains the test configuration."""
+    test_script_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="testScriptFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The test script file for the test run."""
+    user_prop_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="userPropFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The user properties file."""
+    input_artifacts_zip_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="inputArtifactsZipFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The zip file for all input artifacts."""
+    url_test_config_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="urlTestConfigFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The config JSON file for URL-based test."""
+    additional_file_info: Optional[List["_models.TestRunFileInfo"]] = rest_field(
+        name="additionalFileInfo", visibility=["read"]
+    )
+    """Additional supported files for the test run."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        config_file_info: Optional["_models.TestRunFileInfo"] = None,
+        test_script_file_info: Optional["_models.TestRunFileInfo"] = None,
+        user_prop_file_info: Optional["_models.TestRunFileInfo"] = None,
+        input_artifacts_zip_file_info: Optional["_models.TestRunFileInfo"] = None,
+        url_test_config_file_info: Optional["_models.TestRunFileInfo"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestRunOutputArtifacts(_Model):
+    """The output artifacts for the test run.
+
+    :ivar result_file_info: The test run results file.
+    :vartype result_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar logs_file_info: The test run report with metrics.
+    :vartype logs_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    :ivar artifacts_container_info: The container for test run artifacts.
+    :vartype artifacts_container_info: ~microsoft.loadtestservice.models.ArtifactsContainerInfo
+    :ivar report_file_info: The report file for the test run.
+    :vartype report_file_info: ~microsoft.loadtestservice.models.TestRunFileInfo
+    """
+
+    result_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="resultFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The test run results file."""
+    logs_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="logsFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The test run report with metrics."""
+    artifacts_container_info: Optional["_models.ArtifactsContainerInfo"] = rest_field(
+        name="artifactsContainerInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The container for test run artifacts."""
+    report_file_info: Optional["_models.TestRunFileInfo"] = rest_field(
+        name="reportFileInfo", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The report file for the test run."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        result_file_info: Optional["_models.TestRunFileInfo"] = None,
+        logs_file_info: Optional["_models.TestRunFileInfo"] = None,
+        artifacts_container_info: Optional["_models.ArtifactsContainerInfo"] = None,
+        report_file_info: Optional["_models.TestRunFileInfo"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestRunServerMetricsConfiguration(_Model):
+    """Test run server metrics configuration.
+
+    :ivar test_run_id: Test run identifier.
+    :vartype test_run_id: str
+    :ivar metrics: Azure resource metrics collection {metric id : metrics object} (Refer :
+     `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition
+     <https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition>`_
+     for metric id).
+    :vartype metrics: dict[str, ~microsoft.loadtestservice.models.ResourceMetric]
+    :ivar created_date_time: The creation datetime(RFC 3339 literal format).
+    :vartype created_date_time: ~datetime.datetime
+    :ivar created_by: The user that created.
+    :vartype created_by: str
+    :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format).
+    :vartype last_modified_date_time: ~datetime.datetime
+    :ivar last_modified_by: The user that last modified.
+    :vartype last_modified_by: str
+    """
+
+    test_run_id: Optional[str] = rest_field(name="testRunId", visibility=["read"])
+    """Test run identifier."""
+    metrics: Optional[Dict[str, "_models.ResourceMetric"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Azure resource metrics collection {metric id : metrics object} (Refer :
+    `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition
+    <https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition>`_
+    for metric id)."""
+    created_date_time: Optional[datetime.datetime] = rest_field(
+        name="createdDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The creation datetime(RFC 3339 literal format)."""
+    created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"])
+    """The user that created."""
+    last_modified_date_time: Optional[datetime.datetime] = rest_field(
+        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The last Modified datetime(RFC 3339 literal format)."""
+    last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"])
+    """The user that last modified."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        metrics: Optional[Dict[str, "_models.ResourceMetric"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TestRunStatistics(_Model):
+    """Test run statistics.
+
+    :ivar transaction: Transaction name.
+    :vartype transaction: str
+    :ivar sample_count: Sampler count.
+    :vartype sample_count: float
+    :ivar error_count: Error count.
+    :vartype error_count: float
+    :ivar error_pct: Error percentage.
+    :vartype error_pct: float
+    :ivar mean_res_time: Mean response time.
+    :vartype mean_res_time: float
+    :ivar median_res_time: Median response time.
+    :vartype median_res_time: float
+    :ivar max_res_time: Max response time.
+    :vartype max_res_time: float
+    :ivar min_res_time: Minimum response time.
+    :vartype min_res_time: float
+    :ivar pct1_res_time: 90th percentile response time.
+    :vartype pct1_res_time: float
+    :ivar pct2_res_time: 95th percentile response time.
+    :vartype pct2_res_time: float
+    :ivar pct3_res_time: 99th percentile response time.
+    :vartype pct3_res_time: float
+    :ivar pct75_res_time: 75th percentile response time.
+    :vartype pct75_res_time: float
+    :ivar pct96_res_time: 96th percentile response time.
+    :vartype pct96_res_time: float
+    :ivar pct97_res_time: 97th percentile response time.
+    :vartype pct97_res_time: float
+    :ivar pct98_res_time: 98th percentile response time.
+    :vartype pct98_res_time: float
+    :ivar pct999_res_time: 99.9th percentile response time.
+    :vartype pct999_res_time: float
+    :ivar pct9999_res_time: 99.99th percentile response time.
+    :vartype pct9999_res_time: float
+    :ivar throughput: Throughput.
+    :vartype throughput: float
+    :ivar received_k_bytes_per_sec: Received network kilobytes per second.
+    :vartype received_k_bytes_per_sec: float
+    :ivar sent_k_bytes_per_sec: Sent network kilobytes per second.
+    :vartype sent_k_bytes_per_sec: float
+    """
+
+    transaction: Optional[str] = rest_field(visibility=["read"])
+    """Transaction name."""
+    sample_count: Optional[float] = rest_field(name="sampleCount", visibility=["read"])
+    """Sampler count."""
+    error_count: Optional[float] = rest_field(name="errorCount", visibility=["read"])
+    """Error count."""
+    error_pct: Optional[float] = rest_field(name="errorPct", visibility=["read"])
+    """Error percentage."""
+    mean_res_time: Optional[float] = rest_field(name="meanResTime", visibility=["read"])
+    """Mean response time."""
+    median_res_time: Optional[float] = rest_field(name="medianResTime", visibility=["read"])
+    """Median response time."""
+    max_res_time: Optional[float] = rest_field(name="maxResTime", visibility=["read"])
+    """Max response time."""
+    min_res_time: Optional[float] = rest_field(name="minResTime", visibility=["read"])
+    """Minimum response time."""
+    pct1_res_time: Optional[float] = rest_field(name="pct1ResTime", visibility=["read"])
+    """90th percentile response time."""
+    pct2_res_time: Optional[float] = rest_field(name="pct2ResTime", visibility=["read"])
+    """95th percentile response time."""
+    pct3_res_time: Optional[float] = rest_field(name="pct3ResTime", visibility=["read"])
+    """99th percentile response time."""
+    pct75_res_time: Optional[float] = rest_field(name="pct75ResTime", visibility=["read"])
+    """75th percentile response time."""
+    pct96_res_time: Optional[float] = rest_field(name="pct96ResTime", visibility=["read"])
+    """96th percentile response time."""
+    pct97_res_time: Optional[float] = rest_field(name="pct97ResTime", visibility=["read"])
+    """97th percentile response time."""
+    pct98_res_time: Optional[float] = rest_field(name="pct98ResTime", visibility=["read"])
+    """98th percentile response time."""
+    pct999_res_time: Optional[float] = rest_field(name="pct999ResTime", visibility=["read"])
+    """99.9th percentile response time."""
+    pct9999_res_time: Optional[float] = rest_field(name="pct9999ResTime", visibility=["read"])
+    """99.99th percentile response time."""
+    throughput: Optional[float] = rest_field(visibility=["read"])
+    """Throughput."""
+    received_k_bytes_per_sec: Optional[float] = rest_field(name="receivedKBytesPerSec", visibility=["read"])
+    """Received network kilobytes per second."""
+    sent_k_bytes_per_sec: Optional[float] = rest_field(name="sentKBytesPerSec", visibility=["read"])
+    """Sent network kilobytes per second."""
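Every field on ``TestRunStatistics`` is read-only (``visibility=["read"]``), so instances arrive on service responses rather than being built locally. A small illustrative read, assuming ``test_run`` is a ``TestRun`` returned by an earlier fetch:

```python
# "Total" aggregates all samplers, per the docstring above; other keys are
# the sampler names from the test script.
stats = test_run.test_run_statistics or {}
total = stats.get("Total")
if total is not None:
    print(total.mean_res_time, total.error_pct, total.throughput)
```

+
+
+class TestServerMetricsConfiguration(_Model):
+    """Test server metrics configuration.
+
+    :ivar test_id: Test identifier.
+    :vartype test_id: str
+    :ivar metrics: Azure resource metrics collection {metric id : metrics object} (Refer :
+     `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition
+     <https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition>`_
+     for metric id). Required.
+    :vartype metrics: dict[str, ~microsoft.loadtestservice.models.ResourceMetric]
+    :ivar created_date_time: The creation datetime(RFC 3339 literal format).
+    :vartype created_date_time: ~datetime.datetime
+    :ivar created_by: The user that created.
+    :vartype created_by: str
+    :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format).
+    :vartype last_modified_date_time: ~datetime.datetime
+    :ivar last_modified_by: The user that last modified.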
+    :vartype last_modified_by: str
+    """
+
+    test_id: Optional[str] = rest_field(name="testId", visibility=["read"])
+    """Test identifier."""
+    metrics: Dict[str, "_models.ResourceMetric"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Azure resource metrics collection {metric id : metrics object} (Refer :
+    `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition
+    <https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition>`_
+    for metric id). Required."""
+    created_date_time: Optional[datetime.datetime] = rest_field(
+        name="createdDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The creation datetime(RFC 3339 literal format)."""
+    created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"])
+    """The user that created."""
+    last_modified_date_time: Optional[datetime.datetime] = rest_field(
+        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
+    )
+    """The last Modified datetime(RFC 3339 literal format)."""
+    last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"])
+    """The user that last modified."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        metrics: Dict[str, "_models.ResourceMetric"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class TimeSeriesElement(_Model):
+    """The time series returned when a data query is performed.
+
+    :ivar data: An array of data points representing the metric values.
+    :vartype data: list[~microsoft.loadtestservice.models.MetricValue]
+    :ivar dimension_values: The dimension values.
+    :vartype dimension_values: list[~microsoft.loadtestservice.models.DimensionValue]
+    """
+
+    data: Optional[List["_models.MetricValue"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """An array of data points representing the metric values."""
+    dimension_values: Optional[List["_models.DimensionValue"]] = rest_field(
+        name="dimensionValues", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The dimension values."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        data: Optional[List["_models.MetricValue"]] = None,
+        dimension_values: Optional[List["_models.DimensionValue"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
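As a closing illustration for this models file, a sketch of supplying the required ``metrics`` map via the mapping overload. The inner dictionary is a placeholder only; ``ResourceMetric``'s actual fields are defined earlier in this module and should be consulted for the real shape:

```python
from microsoft.loadtestservice.models import TestServerMetricsConfiguration

# Raw-JSON form keyed by metric id; "resourceId" here is an assumed/placeholder
# ResourceMetric field, not verified against this patch.
config = TestServerMetricsConfiguration(
    {"metrics": {"example-metric-id": {"resourceId": "/subscriptions/.../myResource"}}}
)
```

diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py
new file mode 100644
index 000000000000..8bcb627aa475
--- /dev/null
+++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+"""Customize generated code here.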
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py index 79adb48c202b..8cfc28a1de29 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py index 36b793be030d..402294d5b89d 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py index 6d19b26a3aca..b49976147e7c 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -59,9 +60,9 @@ "config2": { "instanceMemoryMB": 4096, "httpConcurrency": 100, - } - } - } + }, + }, + }, }, ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py index ed8380e6ca39..1c8c7fbe5d0e 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/setup.py b/sdk/loadtesting/azure-developer-loadtesting/setup.py index 0d0752f9040b..4a256ffedbaa 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/setup.py +++ b/sdk/loadtesting/azure-developer-loadtesting/setup.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# coding: utf-8 + import os import re @@ -14,9 +14,10 @@ PACKAGE_NAME = "azure-developer-loadtesting" PACKAGE_PPRINT_NAME = "Azure Developer Loadtesting" +PACKAGE_NAMESPACE = "customizations" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: @@ -29,7 +30,7 @@ setup( name=PACKAGE_NAME, version=version, - description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", license="MIT License", @@ -42,7 +43,6 @@ "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -53,19 +53,16 @@ packages=find_packages( exclude=[ "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.developer", ] ), include_package_data=True, package_data={ - "azure.developer.loadtesting": ["py.typed"], + "customizations": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", "azure-core>=1.30.0", "typing-extensions>=4.6.0", ], - python_requires=">=3.8", + python_requires=">=3.9", ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py index 655cfce51f17..69232f7a0183 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py @@ -16,6 +16,7 @@ DISPLAY_NAME = "TestingResourcePyTest" + class TestLoadTestAdministrationOperations(LoadTestingAsyncTest): @LoadTestingPreparer() @@ -36,8 +37,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -49,7 +60,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None 
await self.close_admin_client() @@ -137,7 +149,7 @@ async def test_delete_test_file(self, loadtesting_endpoint, loadtesting_test_id) assert result is None await self.close_admin_client() - + @LoadTestingPreparer() @recorded_by_proxy_async @pytest.mark.asyncio @@ -230,6 +242,7 @@ async def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id) await self.close_admin_client() + class TestTestProfileAdministrationOperations(LoadTestingAsyncTest): @LoadTestingPreparer() @@ -250,8 +263,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -263,7 +286,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None await self.close_admin_client() @@ -283,7 +307,9 @@ async def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy_async @pytest.mark.asyncio - async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + async def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -297,16 +323,10 @@ async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) @@ -318,7 +338,7 @@ async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes @pytest.mark.asyncio async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): set_bodiless_matcher() - + client = self.create_administration_client(loadtesting_endpoint) result = await client.get_test_profile(loadtesting_test_profile_id) assert result is not None @@ -330,7 +350,7 @@ async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_pro @pytest.mark.asyncio async def test_list_test_profiles(self, loadtesting_endpoint): set_bodiless_matcher() - + client = self.create_administration_client(loadtesting_endpoint) result = client.list_test_profiles() assert result is not None diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py index a35c670296df..c00da04555ee 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py +++ 
b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py @@ -17,6 +17,7 @@ DISPLAY_NAME = "TestingResourcePyTest" NON_EXISTING_RESOURCE = "nonexistingresource" + class TestLoadTestRunOperations(LoadTestingAsyncTest): # Pre-requisite: Test creation is needed for test run related tests @@ -37,8 +38,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -50,7 +61,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -66,7 +78,7 @@ async def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): assert result is not None await self.close_admin_client() - + @LoadTestingPreparer() @recorded_by_proxy_async async def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id): @@ -154,7 +166,7 @@ async def test_list_test_runs(self, loadtesting_endpoint): result = run_client.list_test_runs() assert result is not None items = [item async for item in result] - assert len(items) > 0 # Atleast one item in the page + assert len(items) > 0 # Atleast one item in the page await self.close_run_client() @@ -217,9 +229,7 @@ async def test_create_or_update_app_component( @LoadTestingPreparer() @recorded_by_proxy_async - async def test_get_app_component( - self, loadtesting_endpoint, loadtesting_test_run_id - ): + async def test_get_app_component(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -260,9 +270,7 @@ async def test_create_or_update_server_metrics_config( @LoadTestingPreparer() @recorded_by_proxy_async - async def test_get_server_metrics_config( - self, loadtesting_endpoint, loadtesting_test_run_id - ): + async def test_get_server_metrics_config(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -302,7 +310,7 @@ async def test_delete_test_run(self, loadtesting_endpoint, loadtesting_test_run_ assert result is None await self.close_run_client() - + @LoadTestingPreparer() @recorded_by_proxy_async async def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): @@ -315,6 +323,7 @@ async def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): await self.close_admin_client() + class TestTestProfileRunOperations(LoadTestingAsyncTest): # Pre-requisite: Test & Test Profile creation is needed for test profile run related tests @@ -335,8 +344,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": 
"avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -398,7 +417,9 @@ async def test_get_test_file(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy_async - async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + async def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -412,16 +433,10 @@ async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) assert result is not None @@ -441,7 +456,9 @@ async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_pro @LoadTestingPreparer() @recorded_by_proxy_async - async def test_begin_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id): + async def test_begin_test_profile_run( + self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id + ): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py index 22b217d8200e..25ee309e1019 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py @@ -14,6 +14,7 @@ DISPLAY_NAME = "TestingResourcePyTest" + class TestLoadTestAdministrationOperations(LoadTestingTest): @LoadTestingPreparer() @@ -33,8 +34,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -46,7 +57,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -110,7 +122,8 @@ def list_test_files(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy def test_create_or_update_app_components( - self, loadtesting_endpoint, loadtesting_test_id, loadtesting_app_component_id): + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_app_component_id + ): 
         set_bodiless_matcher()
         client = self.create_administration_client(loadtesting_endpoint)
@@ -181,7 +194,7 @@ def test_delete_test_file(self, loadtesting_endpoint, loadtesting_test_id):
         client = self.create_administration_client(loadtesting_endpoint)
         result = client.delete_test_file(loadtesting_test_id, "sample.jmx")
         assert result is None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id):
@@ -191,6 +204,7 @@ def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id):
         result = client.delete_test(loadtesting_test_id)
         assert result is None

+
 class TestTestProfileAdministrationOperations(LoadTestingTest):

     # Pre-requisite: Test creation is needed for test profile related tests
@@ -211,8 +225,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "passFailCriteria": {
                 "passFailMetrics": {
-                    "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300},
-                    "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50},
+                    "condition1": {
+                        "clientmetric": "response_time_ms",
+                        "aggregate": "avg",
+                        "condition": ">",
+                        "value": 300,
+                    },
+                    "condition2": {
+                        "clientmetric": "error",
+                        "aggregate": "percentage",
+                        "condition": ">",
+                        "value": 50,
+                    },
                     "condition3": {
                         "clientmetric": "latency",
                         "aggregate": "avg",
@@ -224,7 +248,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "secrets": {},
             "environmentVariables": {"my-variable": "value"},
-            })
+            },
+        )

         assert result is not None

@@ -239,7 +264,9 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id):

     @LoadTestingPreparer()
     @recorded_by_proxy
-    def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id):
+    def test_create_or_update_test_profile(
+        self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id
+    ):
         set_bodiless_matcher()
         client = self.create_administration_client(loadtesting_endpoint)

@@ -253,20 +280,14 @@ def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_t
                 "targetResourceConfigurations": {
                     "kind": "FunctionsFlexConsumption",
                     "configurations": {
-                        "config1": {
-                            "instanceMemoryMB": 2048,
-                            "httpConcurrency": 20
-                        },
-                        "config2": {
-                            "instanceMemoryMB": 4096,
-                            "httpConcurrency": 100
-                        },
-                    }
-                }
+                        "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20},
+                        "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100},
+                    },
+                },
             },
         )

         assert result is not None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id):
@@ -286,7 +307,7 @@ def test_list_test_profiles(self, loadtesting_endpoint):
         assert result is not None
         items = [r for r in result]
         assert len(items) > 0 # page has atleast one item
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_delete_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id):
diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py
index be5b5d5efa29..ab60ea9c712b 100644
--- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py
+++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py
@@ -35,8 +35,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "passFailCriteria": {
                 "passFailMetrics": {
-                    "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300},
-                    "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50},
+                    "condition1": {
+                        "clientmetric": "response_time_ms",
+                        "aggregate": "avg",
+                        "condition": ">",
+                        "value": 300,
+                    },
+                    "condition2": {
+                        "clientmetric": "error",
+                        "aggregate": "percentage",
+                        "condition": ">",
+                        "value": 50,
+                    },
                     "condition3": {
                         "clientmetric": "latency",
                         "aggregate": "avg",
@@ -48,7 +58,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "secrets": {},
             "environmentVariables": {"my-variable": "value"},
-            })
+            },
+        )

         assert result is not None

@@ -60,7 +71,7 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id):
         client = self.create_administration_client(loadtesting_endpoint)
         result = client.get_test(loadtesting_test_id)
         assert result is not None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id):
@@ -138,7 +149,7 @@ def test_list_test_runs(self, loadtesting_endpoint, loadtesting_test_id, loadtes
         result = run_client.list_test_runs()
         assert result is not None
         items = [item for item in result]
-        assert len(items) > 0 # Atleast one item in the page
+        assert len(items) > 0  # At least one item in the page

     @LoadTestingPreparer()
     @recorded_by_proxy
@@ -195,9 +206,7 @@ def test_create_or_update_app_component(

     @LoadTestingPreparer()
     @recorded_by_proxy
-    def test_get_app_component(
-        self, loadtesting_endpoint, loadtesting_test_run_id
-    ):
+    def test_get_app_component(self, loadtesting_endpoint, loadtesting_test_run_id):
         set_bodiless_matcher()

         run_client = self.create_run_client(loadtesting_endpoint)
@@ -234,9 +243,7 @@ def test_create_or_update_server_metrics_config(

     @LoadTestingPreparer()
     @recorded_by_proxy
-    def test_get_server_metrics_config(
-        self, loadtesting_endpoint, loadtesting_test_run_id
-    ):
+    def test_get_server_metrics_config(self, loadtesting_endpoint, loadtesting_test_run_id):
         set_bodiless_matcher()

         run_client = self.create_run_client(loadtesting_endpoint)
@@ -275,7 +282,7 @@ def test_delete_test_run(self, loadtesting_endpoint, loadtesting_test_run_id):

         result = run_client.delete_test_run(loadtesting_test_run_id)
         assert result is None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id):
@@ -286,6 +293,7 @@ def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id):
         result = client.delete_test(loadtesting_test_id)
         assert result is None

+
 class TestTestProfileRunOperations(LoadTestingTest):

     # Pre-requisite: Test & Test Profile creation is needed for test profile run related tests
@@ -306,8 +314,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "passFailCriteria": {
                 "passFailMetrics": {
-                    "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300},
-                    "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50},
+                    "condition1": {
+                        "clientmetric": "response_time_ms",
+                        "aggregate": "avg",
+                        "condition": ">",
+                        "value": 300,
+                    },
+                    "condition2": {
+                        "clientmetric": "error",
+                        "aggregate": "percentage",
+                        "condition": ">",
+                        "value": 50,
+                    },
                     "condition3": {
                         "clientmetric": "latency",
                         "aggregate": "avg",
@@ -319,7 +337,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test
             },
             "secrets": {},
             "environmentVariables": {"my-variable": "value"},
-            })
+            },
+        )

         assert result is not None

@@ -331,7 +350,7 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id):
         client = self.create_administration_client(loadtesting_endpoint)
         result = client.get_test(loadtesting_test_id)
         assert result is not None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id):
@@ -360,7 +379,9 @@ def test_get_test_file(self, loadtesting_endpoint, loadtesting_test_id):

     @LoadTestingPreparer()
     @recorded_by_proxy
-    def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id):
+    def test_create_or_update_test_profile(
+        self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id
+    ):
         set_bodiless_matcher()
         client = self.create_administration_client(loadtesting_endpoint)

@@ -374,20 +395,14 @@ def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_t
                 "targetResourceConfigurations": {
                     "kind": "FunctionsFlexConsumption",
                     "configurations": {
-                        "config1": {
-                            "instanceMemoryMB": 2048,
-                            "httpConcurrency": 20
-                        },
-                        "config2": {
-                            "instanceMemoryMB": 4096,
-                            "httpConcurrency": 100
-                        },
-                    }
-                }
+                        "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20},
+                        "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100},
+                    },
+                },
             },
         )

         assert result is not None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id):
@@ -399,7 +414,9 @@ def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_i

     @LoadTestingPreparer()
     @recorded_by_proxy
-    def test_begin_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id):
+    def test_begin_test_profile_run(
+        self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id
+    ):
         set_bodiless_matcher()

         run_client = self.create_run_client(loadtesting_endpoint)
@@ -428,7 +445,7 @@ def test_get_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profi
         result = run_client.get_test_profile_run(loadtesting_test_profile_run_id)
         assert result is not None
         assert len(result["recommendations"]) > 0
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_stop_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id):
@@ -460,7 +477,7 @@ def test_delete_test_profile_run(self, loadtesting_endpoint, loadtesting_test_pr

         result = run_client.delete_test_profile_run(loadtesting_test_profile_run_id)
         assert result is None
-
+
     @LoadTestingPreparer()
     @recorded_by_proxy
     def test_delete_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id):
diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py
index 08c5ecb92644..df4dc143934d 100644
--- a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py
+++ b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py
@@ -38,5 +38,5 @@ def create_run_client(self, endpoint) -> LoadTestRunClient:
     loadtesting_app_component_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRG/providers/Microsoft.Web/sites/contoso-sampleapp",
     loadtesting_test_profile_id="some-test-profile-id",
     loadtesting_target_resource_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRG/providers/Microsoft.Web/sites/myFlexFunction",
-    loadtesting_test_profile_run_id="some-test-profile-run-id"
+    loadtesting_test_profile_run_id="some-test-profile-run-id",
 )
diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py
index e7ad3bfcd12c..7e256725c64a 100644
--- a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py
+++ b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py
@@ -16,7 +16,7 @@ def create_administration_client(self, endpoint) -> LoadTestAdministrationClient
             credential=self.admin_credential,
             endpoint=endpoint,
         )
-
+
         return self.admin_client

     def create_run_client(self, endpoint) -> LoadTestRunClient:
@@ -28,11 +28,11 @@ def create_run_client(self, endpoint) -> LoadTestRunClient:
         )

         return self.run_client
-
+
     async def close_admin_client(self):
         await self.admin_credential.close()
         await self.admin_client.close()
-
+
     async def close_run_client(self):
         await self.run_credential.close()
         await self.run_client.close()
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_client.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_client.py
index eb46f324e1ba..7c3e9cada966 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_client.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_client.py
@@ -15,14 +15,14 @@
 from azure.core.rest import HttpRequest, HttpResponse

 from ._configuration import OnlineExperimentationClientConfiguration
-from ._operations import OnlineExperimentationClientOperationsMixin
+from ._operations._operations import _OnlineExperimentationClientOperationsMixin
 from ._utils.serialization import Deserializer, Serializer

 if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential


-class OnlineExperimentationClient(OnlineExperimentationClientOperationsMixin):
+class OnlineExperimentationClient(_OnlineExperimentationClientOperationsMixin):
     """OnlineExperimentationClient.

     :param endpoint: Endpoint URL for the Online Experimentation workspace. Required.
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/__init__.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/__init__.py
index d24bdc6a32ef..933fcd7d1b55 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/__init__.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

-from ._operations import OnlineExperimentationClientOperationsMixin  # type: ignore

 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk

-__all__ = [
-    "OnlineExperimentationClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/_operations.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/_operations.py
index 37f164441e7a..6963f74ff4a8 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/_operations.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=line-too-long,useless-suppression
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
@@ -223,7 +222,7 @@ def build_online_experimentation_list_metrics_request(  # pylint: disable=name-t
     return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


-class OnlineExperimentationClientOperationsMixin(  # pylint: disable=name-too-long
+class _OnlineExperimentationClientOperationsMixin(
     ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], OnlineExperimentationClientConfiguration]
 ):
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_utils/model_base.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_utils/model_base.py
index aaa6692b2346..49d5c7259389 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_utils/model_base.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/_utils/model_base.py
@@ -1,4 +1,4 @@
-# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# pylint: disable=too-many-lines
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_client.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_client.py
index 80f6e51059b0..9ec0639ed118 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_client.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_client.py
@@ -16,13 +16,13 @@
 from .._utils.serialization import Deserializer, Serializer
 from ._configuration import OnlineExperimentationClientConfiguration
-from ._operations import OnlineExperimentationClientOperationsMixin
+from ._operations._operations import _OnlineExperimentationClientOperationsMixin

 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential


-class OnlineExperimentationClient(OnlineExperimentationClientOperationsMixin):
+class OnlineExperimentationClient(_OnlineExperimentationClientOperationsMixin):
     """OnlineExperimentationClient.

     :param endpoint: Endpoint URL for the Online Experimentation workspace. Required.
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/__init__.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/__init__.py
index d24bdc6a32ef..933fcd7d1b55 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/__init__.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

-from ._operations import OnlineExperimentationClientOperationsMixin  # type: ignore

 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk

-__all__ = [
-    "OnlineExperimentationClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/_operations.py b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/_operations.py
index 55c5bd724563..6515efd39b57 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/_operations.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/azure/onlineexperimentation/aio/_operations/_operations.py
@@ -49,7 +49,7 @@
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


-class OnlineExperimentationClientOperationsMixin(  # pylint: disable=name-too-long
+class _OnlineExperimentationClientOperationsMixin(
     ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], OnlineExperimentationClientConfiguration]
 ):
diff --git a/sdk/onlineexperimentation/azure-onlineexperimentation/setup.py b/sdk/onlineexperimentation/azure-onlineexperimentation/setup.py
index 0ae144b3bb0e..2824427f61ad 100644
--- a/sdk/onlineexperimentation/azure-onlineexperimentation/setup.py
+++ b/sdk/onlineexperimentation/azure-onlineexperimentation/setup.py
@@ -14,9 +14,10 @@

 PACKAGE_NAME = "azure-onlineexperimentation"
 PACKAGE_PPRINT_NAME = "Azure Onlineexperimentation"
+PACKAGE_NAMESPACE = "azure.onlineexperimentation"

-# a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace("-", "/")
+# a.b.c => a/b/c
+package_folder_path = PACKAGE_NAMESPACE.replace(".", "/")

 # Version extraction inspired from 'requests'
 with open(os.path.join(package_folder_path, "_version.py"), "r") as fd:
diff --git a/sdk/schemaregistry/azure-schemaregistry/_metadata.json b/sdk/schemaregistry/azure-schemaregistry/_metadata.json
new file mode 100644
index 000000000000..69b11873d86e
--- /dev/null
+++ b/sdk/schemaregistry/azure-schemaregistry/_metadata.json
@@ -0,0 +1,3 @@
+{
+  "apiVersion": "2023-07-01"
+}
\ No newline at end of file
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/__init__.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/__init__.py
index 3c06cf24a912..11473a60c191 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/__init__.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/__init__.py
@@ -12,6 +12,7 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

+from ._client import SchemaRegistryClient  # type: ignore
 from ._version import VERSION

 __version__ = VERSION
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_client.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_client.py
index 4f23a2d9ed0d..5a04e5664d99 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_client.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_client.py
@@ -15,14 +15,14 @@
 from azure.core.rest import HttpRequest, HttpResponse

 from ._configuration import SchemaRegistryClientConfiguration
-from ._operations import SchemaRegistryClientOperationsMixin
+from ._operations._operations import _SchemaRegistryClientOperationsMixin
 from ._utils.serialization import Deserializer, Serializer

 if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential


-class SchemaRegistryClient(SchemaRegistryClientOperationsMixin):
+class SchemaRegistryClient(_SchemaRegistryClientOperationsMixin):
     """SchemaRegistryClient is a client for registering and retrieving schemas from the Azure Schema
     Registry service.

diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/__init__.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/__init__.py
index 68111dccce5b..933fcd7d1b55 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/__init__.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

-from ._operations import SchemaRegistryClientOperationsMixin  # type: ignore

 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk

-__all__ = [
-    "SchemaRegistryClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/_operations.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/_operations.py
index df7d6e330ea4..4e58149ca5e7 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/_operations.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/_operations/_operations.py
@@ -6,7 +6,8 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 from collections.abc import MutableMapping
-from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, TypeVar
+import json
+from typing import Any, Callable, Dict, Iterator, List, Optional, TypeVar
 import urllib.parse

 from azure.core import PipelineClient
@@ -27,7 +28,7 @@ from azure.core.utils import case_insensitive_dict

 from .._configuration import SchemaRegistryClientConfiguration
-from .._utils.model_base import _deserialize
+from .._utils.model_base import SdkJSONEncoder, _deserialize
 from .._utils.serialization import Serializer
 from .._utils.utils import ClientMixinABC

@@ -198,10 +199,12 @@ def build_schema_registry_register_schema_request(  # pylint: disable=name-too-l
     return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


-class SchemaRegistryClientOperationsMixin(ClientMixinABC[PipelineClient, SchemaRegistryClientConfiguration]):
+class _SchemaRegistryClientOperationsMixin(
+    ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], SchemaRegistryClientConfiguration]
+):

     @distributed_trace
-    def _list_schema_groups(self, **kwargs: Any) -> Iterable[str]:
+    def _list_schema_groups(self, **kwargs: Any) -> ItemPaged[str]:
         """Get list of schema groups.

         Gets the list of schema groups user is authorized to access.
@@ -291,7 +294,7 @@ def get_next(next_link=None):

         return ItemPaged(get_next, extract_data)

     @distributed_trace
-    def _list_schema_versions(self, group_name: str, schema_name: str, **kwargs: Any) -> Iterable[int]:
+    def _list_schema_versions(self, group_name: str, schema_name: str, **kwargs: Any) -> ItemPaged[int]:
         """List schema versions.

         Gets the list of all versions of one schema.
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/__init__.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/__init__.py
index f7837a62cd2b..9ffb253040e2 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/__init__.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/__init__.py
@@ -12,6 +12,8 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

+from ._client import SchemaRegistryClient  # type: ignore
+
 try:
     from ._patch import __all__ as _patch_all
     from ._patch import *
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_client.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_client.py
index d11e49aa9674..c9b59c897ede 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_client.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_client.py
@@ -16,13 +16,13 @@
 from .._utils.serialization import Deserializer, Serializer
 from ._configuration import SchemaRegistryClientConfiguration
-from ._operations import SchemaRegistryClientOperationsMixin
+from ._operations._operations import _SchemaRegistryClientOperationsMixin

 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential


-class SchemaRegistryClient(SchemaRegistryClientOperationsMixin):
+class SchemaRegistryClient(_SchemaRegistryClientOperationsMixin):
     """SchemaRegistryClient is a client for registering and retrieving schemas from the Azure Schema
     Registry service.
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/__init__.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/__init__.py
index 68111dccce5b..933fcd7d1b55 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/__init__.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/__init__.py
@@ -12,14 +12,11 @@
 if TYPE_CHECKING:
     from ._patch import *  # pylint: disable=unused-wildcard-import

-from ._operations import SchemaRegistryClientOperationsMixin  # type: ignore

 from ._patch import __all__ as _patch_all
 from ._patch import *
 from ._patch import patch_sdk as _patch_sdk

-__all__ = [
-    "SchemaRegistryClientOperationsMixin",
-]
+__all__ = []
 __all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
 _patch_sdk()
diff --git a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/_operations.py b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/_operations.py
index 5ec6f43ed5e3..9e78608372e1 100644
--- a/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/_operations.py
+++ b/sdk/schemaregistry/azure-schemaregistry/azure/schemaregistry/aio/_operations/_operations.py
@@ -7,7 +7,8 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 from collections.abc import MutableMapping
-from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, List, Optional, TypeVar
+import json
+from typing import Any, AsyncIterator, Callable, Dict, List, Optional, TypeVar
 import urllib.parse

 from azure.core import AsyncPipelineClient
@@ -36,7 +37,7 @@
     build_schema_registry_list_schema_versions_request,
     build_schema_registry_register_schema_request,
 )
-from ..._utils.model_base import _deserialize
+from ..._utils.model_base import SdkJSONEncoder, _deserialize
 from ..._utils.utils import ClientMixinABC
 from .._configuration import SchemaRegistryClientConfiguration

@@ -44,10 +45,12 @@
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


-class SchemaRegistryClientOperationsMixin(ClientMixinABC[AsyncPipelineClient, SchemaRegistryClientConfiguration]):
+class _SchemaRegistryClientOperationsMixin(
+    ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], SchemaRegistryClientConfiguration]
+):

     @distributed_trace
-    def _list_schema_groups(self, **kwargs: Any) -> AsyncIterable[str]:
+    def _list_schema_groups(self, **kwargs: Any) -> AsyncItemPaged[str]:
         """Get list of schema groups.

         Gets the list of schema groups user is authorized to access.
@@ -137,7 +140,7 @@ async def get_next(next_link=None):

         return AsyncItemPaged(get_next, extract_data)

     @distributed_trace
-    def _list_schema_versions(self, group_name: str, schema_name: str, **kwargs: Any) -> AsyncIterable[int]:
+    def _list_schema_versions(self, group_name: str, schema_name: str, **kwargs: Any) -> AsyncItemPaged[int]:
         """List schema versions.

         Gets the list of all versions of one schema.
diff --git a/sdk/schemaregistry/azure-schemaregistry/generated_tests/conftest.py b/sdk/schemaregistry/azure-schemaregistry/generated_tests/conftest.py
new file mode 100644
index 000000000000..0edb76ca93e9
--- /dev/null
+++ b/sdk/schemaregistry/azure-schemaregistry/generated_tests/conftest.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    schemaregistry_subscription_id = os.environ.get(
+        "SCHEMAREGISTRY_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    schemaregistry_tenant_id = os.environ.get("SCHEMAREGISTRY_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    schemaregistry_client_id = os.environ.get("SCHEMAREGISTRY_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    schemaregistry_client_secret = os.environ.get(
+        "SCHEMAREGISTRY_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=schemaregistry_subscription_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=schemaregistry_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=schemaregistry_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=schemaregistry_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer.py b/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer.py
new file mode 100644
index 000000000000..08c05c39d9b8
--- /dev/null
+++ b/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from azure.schemaregistry import SchemaRegistryClient
+from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer
+import functools
+
+
+class SchemaRegistryClientTestBase(AzureRecordedTestCase):
+
+    def create_client(self, endpoint):
+        credential = self.get_credential(SchemaRegistryClient)
+        return self.create_client_from_credential(
+            SchemaRegistryClient,
+            credential=credential,
+            endpoint=endpoint,
+        )
+
+
+SchemaRegistryPreparer = functools.partial(
+    PowerShellPreparer, "schemaregistry", schemaregistry_endpoint="https://fake_schemaregistry_endpoint.com"
+)
diff --git a/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer_async.py b/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer_async.py
new file mode 100644
index 000000000000..3b3c633aac49
--- /dev/null
+++ b/sdk/schemaregistry/azure-schemaregistry/generated_tests/testpreparer_async.py
@@ -0,0 +1,20 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from azure.schemaregistry.aio import SchemaRegistryClient
+from devtools_testutils import AzureRecordedTestCase
+
+
+class SchemaRegistryClientTestBaseAsync(AzureRecordedTestCase):
+
+    def create_async_client(self, endpoint):
+        credential = self.get_credential(SchemaRegistryClient, is_async=True)
+        return self.create_client_from_credential(
+            SchemaRegistryClient,
+            credential=credential,
+            endpoint=endpoint,
+        )
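
For reference, a test written against the generated helpers above would look roughly like the sketch below. This is illustrative only and not part of the patch: the test class, group name, and schema definition are made-up sample values, and it assumes the usual devtools_testutils recorded-test workflow used elsewhere in this repository.

# Illustrative only -- not part of the patch above.
from devtools_testutils import recorded_by_proxy

from testpreparer import SchemaRegistryClientTestBase, SchemaRegistryPreparer


class TestSchemaRegistrySmoke(SchemaRegistryClientTestBase):

    @SchemaRegistryPreparer()
    @recorded_by_proxy
    def test_register_and_get_schema(self, schemaregistry_endpoint):
        # create_client comes from the generated SchemaRegistryClientTestBase;
        # the preparer injects schemaregistry_endpoint from the environment or
        # falls back to the fake endpoint when playing back recordings.
        client = self.create_client(schemaregistry_endpoint)
        definition = '{"type": "record", "name": "Example", "fields": [{"name": "id", "type": "string"}]}'
        # register_schema returns the schema properties (id, format, version, ...)
        props = client.register_schema("my-group", "Example", definition, "Avro")
        assert props.id is not None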