
Commit fba684e

ODSC-38627: update docs
1 parent e094c74 commit fba684e


docs/source/user_guide/model_registration/quick_start.rst

Lines changed: 15 additions & 15 deletions
@@ -28,7 +28,7 @@ Sklearn
     estimator=sklearn_estimator, artifact_dir=tempfile.mkdtemp()
 )

-# Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+# Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
 sklearn_model.prepare(
     inference_conda_env="dbexp_p38_cpu_v1",
     X_sample=X_train,
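
The hunk shows only the tail of the Sklearn example. A minimal sketch of the full prepare/verify/save flow it belongs to, assuming an illustrative iris/LogisticRegression estimator (only the wrapper call and the prepare() arguments come from the diff; the training data, auth mode, and display name are placeholders):

    import tempfile

    import ads
    from ads.model.framework.sklearn_model import SklearnModel
    from sklearn.datasets import load_iris
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import train_test_split

    ads.set_auth(auth="resource_principal")  # or "api_key"

    # Train a small placeholder estimator
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(X, y)
    sklearn_estimator = LogisticRegression(max_iter=1000).fit(X_train, y_train)

    # Wrap the estimator in the ADS model class
    sklearn_model = SklearnModel(
        estimator=sklearn_estimator, artifact_dir=tempfile.mkdtemp()
    )

    # Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
    sklearn_model.prepare(
        inference_conda_env="dbexp_p38_cpu_v1",
        X_sample=X_train,
        y_sample=y_train,
    )

    # Run the generated artifact locally, then register the model in the model catalog
    sklearn_model.verify(X_test[:2])
    model_id = sklearn_model.save(display_name="Sklearn Model")

The XGBoost and LightGBM hunks below follow the same prepare/verify/save pattern; only the wrapper class and conda environment change.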
@@ -68,7 +68,7 @@ Create a model, prepare it, verify that it works, save it to the model catalog,
 # Instantiate ads.model.framework.xgboost_model.XGBoostModel using the trained XGBoost model
 xgboost_model = XGBoostModel(estimator=xgboost_estimator, artifact_dir=tempfile.mkdtemp())

-# Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+# Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
 xgboost_model.prepare(
     inference_conda_env="generalml_p38_cpu_v1",
     X_sample=X_train,
@@ -109,7 +109,7 @@ Create a model, prepare it, verify that it works, save it to the model catalog,
 # Instantiate ads.model.framework.lightgbm_model.LightGBMModel using the trained LightGBM model
 lightgbm_model = LightGBMModel(estimator=lightgbm_estimator, artifact_dir=tempfile.mkdtemp())

-# Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+# Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
 lightgbm_model.prepare(
     inference_conda_env="generalml_p38_cpu_v1",
     X_sample=X_train,
@@ -154,7 +154,7 @@ Create a model, prepare it, verify that it works, save it to the model catalog,
 # Verify generated artifacts
 torch_model.verify(test_data)

-#Register PyTorch model
+# Register PyTorch model
 model_id = torch_model.save(display_name="PyTorch Model")
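
For context, save() only registers the model in the catalog; deployment and scoring are separate calls. A brief sketch of that follow-up step (the display name and instance shape are illustrative, not part of this commit):

    # Deploy the registered PyTorch model as an HTTP endpoint managed by ADS
    torch_model.deploy(
        display_name="PyTorch Model Deployment",
        deployment_instance_shape="VM.Standard2.1",  # assumed shape; adjust to your tenancy
    )

    # Score against the deployed endpoint with the same payload used in verify()
    prediction = torch_model.predict(test_data)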
@@ -214,7 +214,7 @@ Create a model, prepare it, verify that it works, save it to the model catalog,
 # Verify generated artifacts
 prediction = spark_model.verify(test)

-#Register Spark model
+# Register Spark model
 spark_model.save(display_name="Spark Pipeline Model")
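
The `test` DataFrame passed to verify() is built earlier in the example and not shown in this hunk; a sketch of comparable scoring input, assuming the pipeline was trained on (id, text) columns (schema and values are illustrative):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.appName("model-registration-quick-start").getOrCreate()

    # Illustrative scoring rows with the same columns the pipeline expects
    test = spark.createDataFrame(
        [(4, "spark i j k"), (5, "l m n"), (6, "spark hadoop spark")],
        ["id", "text"],
    )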
@@ -248,13 +248,13 @@ Create a model, prepare it, verify that it works, save it to the model catalog,
 # Instantiate ads.model.framework.tensorflow_model.TensorFlowModel using the pre-trained TensorFlow model
 tf_model = TensorFlowModel(tf_estimator, artifact_dir=tempfile.mkdtemp())

-# Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+# Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
 tf_model.prepare(inference_conda_env="tensorflow28_p38_cpu_v1")

 # Verify generated artifacts
 tf_model.verify(x_test[:1])

-#Register TensorFlow model
+# Register TensorFlow model
 model_id = tf_model.save(display_name="TensorFlow Model")
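
Unlike the earlier examples, this prepare() call passes no X_sample, so the input/output schemas are not inferred from sample data. A short sketch of two optional follow-ups, reusing the tf_model and x_test objects from the hunk (summary_status() and force_overwrite are used here illustratively):

    # Show which lifecycle steps (initiate, prepare, verify, save, deploy) are done or pending
    tf_model.summary_status()

    # Optionally regenerate the artifact with sample data so schema files are included
    tf_model.prepare(
        inference_conda_env="tensorflow28_p38_cpu_v1",
        X_sample=x_test[:1],
        force_overwrite=True,
    )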
@@ -284,20 +284,20 @@ HuggingFace Pipelines
 ## Initiate a HuggingFacePipelineModel instance
 zero_shot_image_classification_model = HuggingFacePipelineModel(classifier, artifact_dir=tempfile.mkdtemp())

-## Prepare a model artifact
-conda = "oci://bucket@namespace/path/to/conda/pack"
-python_version = "3.8"
-zero_shot_image_classification_model.prepare(inference_conda_env=conda, inference_python_version = python_version, force_overwrite=True)
+# Autogenerate score.py, serialized model, runtime.yaml
+conda_pack_path = "oci://bucket@namespace/path/to/conda/pack"
+python_version = "3.x"
+zero_shot_image_classification_model.prepare(inference_conda_env=conda_pack_path, inference_python_version = python_version, force_overwrite=True)

 ## Test data
 data = {"images": image, "candidate_labels": ["animals", "humans", "landscape"]}
 body = cloudpickle.dumps(data) # convert image to bytes

-## Verify
+# Verify generated artifacts
 zero_shot_image_classification_model.verify(data=data)
 zero_shot_image_classification_model.verify(data=body)

-## Save
+# Register HuggingFace Pipeline model
 zero_shot_image_classification_model.save()

 ## Deploy
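
The `classifier` and `image` objects referenced in this hunk are created earlier in the example; a sketch of how they are typically constructed (the model name and image URL are illustrative assumptions):

    import cloudpickle
    import requests
    from PIL import Image
    from transformers import pipeline

    # Build a zero-shot image classification pipeline to wrap with HuggingFacePipelineModel
    classifier = pipeline(
        "zero-shot-image-classification", model="openai/clip-vit-large-patch14"
    )

    # Load a sample image to score
    url = "http://images.cocodataset.org/val2017/000000039769.jpg"
    image = Image.open(requests.get(url, stream=True).raw)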
@@ -336,7 +336,7 @@ Other Frameworks
 generic_model = GenericModel(estimator=model, artifact_dir=tempfile.mkdtemp())
 generic_model.summary_status()

-# Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+# Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
 generic_model.prepare(
     inference_conda_env="dbexp_p38_cpu_v1",
     model_file_name="toy_model.pkl",
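
The `model` object wrapped by GenericModel is defined earlier in the quick start; any Python object exposing a predict() method will do. A minimal illustrative stand-in:

    # A toy estimator: GenericModel only requires that the wrapped object can predict()
    class Toy:
        def predict(self, x):
            return x ** 2

    model = Toy()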
@@ -372,7 +372,7 @@ With Model Version Set
 # Within the context manager, you can save the :ref:`Model Serialization` model without specifying the ``model_version_set`` parameter because it's taken from the model context manager. If the model version set doesn't exist in the model catalog, the example creates a model version set named ``my_model_version_set``. If the model version set exists in the model catalog, the models are saved to that model version set.
 with ads.model.experiment(name="my_model_version_set", create_if_not_exists=True):

-    # Autogenerate score.py, pickled model, runtime.yaml, input_schema.json and output_schema.json
+    # Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
     generic_model.prepare(
         inference_conda_env="dbexp_p38_cpu_v1",
         model_file_name="toy_model.pkl",
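
Every save() issued inside the context manager above is attached to the named model version set. A condensed sketch of the full block, reusing the generic_model from the previous section (the display name is a placeholder):

    import ads

    with ads.model.experiment(name="my_model_version_set", create_if_not_exists=True):
        # Autogenerate score.py, serialized model, runtime.yaml, input_schema.json and output_schema.json
        generic_model.prepare(
            inference_conda_env="dbexp_p38_cpu_v1",
            model_file_name="toy_model.pkl",
            force_overwrite=True,
        )
        # Saved without model_version_set; it is taken from the experiment context
        generic_model.save(display_name="Generic Model in my_model_version_set")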
