Commit bc59d77

test: test case update for pzmm and cassette refresh
1 parent 72630a9 commit bc59d77


500 files changed: +574 −407 lines


src/sasctl/_services/files.py

Lines changed: 1 addition & 3 deletions
@@ -57,9 +57,7 @@ def create_file(cls, file, folder=None, filename=None, expiration=None):

         """
         if isinstance(file, (str, Path)):
-            # file = str(file)
-            # filename = filename or os.path.splitext(os.path.split(file)[1])[0]
-            filename = Path(file).name
+            filename = filename or Path(file).name

             with open(file, "rb") as f:
                 file = f.read()
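
With this change an explicit `filename` argument is no longer discarded when `file` is a path; the file's own name is only used as a fallback. A minimal usage sketch with hypothetical paths, assuming an authenticated sasctl session and the `sasctl.services.files` entry point:

from sasctl.services import files

# No filename given: defaults to the file's own name, "report.csv".
files.create_file("data/report.csv")

# Explicit filename: now respected instead of being overwritten with "report.csv".
files.create_file("data/report.csv", filename="q3_report.csv")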

src/sasctl/_services/model_repository.py

Lines changed: 4 additions & 3 deletions
@@ -359,9 +359,10 @@ def create_model(

         model["scoreCodeType"] = score_code_type or model.get("scoreCodeType")
         model["trainTable"] = training_table or model.get("trainTable")
-        model["classificationEventProbabilityVariableName"] = (
-            event_prob_variable
-            or model.get("classificationEventProbabilityVariableName")
+        model[
+            "classificationEventProbabilityVariableName"
+        ] = event_prob_variable or model.get(
+            "classificationEventProbabilityVariableName"
         )
         model["classificationTargetEventValue"] = event_target_value or model.get(
             "classificationTargetEventValue"

src/sasctl/pzmm/import_model.py

Lines changed: 7 additions & 5 deletions
@@ -24,17 +24,17 @@ def get_model_properties(
         if type(model_files) is dict:
             try:
                 model = json.loads(model_files["ModelProperties.json"])
-            except json.JSONDecodeError:
+            except (json.JSONDecodeError, TypeError):
                 model = model_files["ModelProperties.json"]

             try:
                 input_var = json.loads(model_files["inputVar.json"])
-            except json.JSONDecodeError:
+            except (json.JSONDecodeError, TypeError):
                 input_var = model_files["inputVar.json"]

             try:
                 output_var = json.loads(model_files["outputVar.json"])
-            except json.JSONDecodeError:
+            except (json.JSONDecodeError, TypeError):
                 output_var = model_files["outputVar.json"]

         else:
@@ -333,7 +333,9 @@ def import_model(
         if isinstance(model_files, dict):
             zip_io_file = zm.zip_files(model_files, model_prefix, is_viya4=False)
         else:
-            zip_io_file = zm.zip_files(Path(model_files), model_prefix, is_viya4=False)
+            zip_io_file = zm.zip_files(
+                Path(model_files), model_prefix, is_viya4=False
+            )
         if cls.notebook_output:
             print(f"All model files were zipped to {Path(model_files)}.")

@@ -459,7 +461,7 @@ def import_model(
         except AttributeError:
             print("Model failed to import to SAS Model Manager.")

-        score_code_dict = sc.write_score_code(
+        score_code_dict = sc().write_score_code(
            model_prefix,
            input_data,
            predict_method,
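
Widening the `except` clause lets `get_model_properties` accept values that are already deserialized: `json.loads` raises `TypeError` (not `JSONDecodeError`) when handed a dict. A standalone sketch of the fallback pattern, using hypothetical data rather than sasctl objects:

import json

model_files = {"ModelProperties.json": {"name": "example_model"}}  # already a dict

try:
    model = json.loads(model_files["ModelProperties.json"])
except (json.JSONDecodeError, TypeError):
    # JSON strings decode normally; dicts (TypeError) and malformed strings
    # (JSONDecodeError) are used as-is.
    model = model_files["ModelProperties.json"]

assert model == {"name": "example_model"}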

src/sasctl/pzmm/pickle_model.py

Lines changed: 4 additions & 1 deletion
@@ -77,6 +77,7 @@ def pickle_trained_model(

         """
         from .write_score_code import ScoreCode
+
         sanitized_prefix = ScoreCode.sanitize_model_prefix(model_prefix)

         if is_binary_string:
@@ -132,7 +133,9 @@ def pickle_trained_model(
                     "The h2o package is required to save the model as a mojo model."
                 )
             trained_model.save_mojo(
-                force=True, path=str(pickle_path), filename=f"{sanitized_prefix}.mojo"
+                force=True,
+                path=str(pickle_path),
+                filename=f"{sanitized_prefix}.mojo",
             )
         elif is_binary_model or is_h2o_model:
             raise ValueError(

src/sasctl/pzmm/write_json_files.py

Lines changed: 7 additions & 2 deletions
@@ -500,6 +500,7 @@ def write_file_metadata_json(
         """

         from .write_score_code import ScoreCode
+
         sanitized_prefix = ScoreCode.sanitize_model_prefix(model_prefix)

         dict_list = [
@@ -508,9 +509,13 @@ def write_file_metadata_json(
             {"role": "score", "name": f"score_{sanitized_prefix}.py"},
         ]
         if is_h2o_model:
-            dict_list.append({"role": "scoreResource", "name": sanitized_prefix + ".mojo"})
+            dict_list.append(
+                {"role": "scoreResource", "name": sanitized_prefix + ".mojo"}
+            )
         elif is_tf_keras_model:
-            dict_list.append({"role": "scoreResource", "name": sanitized_prefix + ".h5"})
+            dict_list.append(
+                {"role": "scoreResource", "name": sanitized_prefix + ".h5"}
+            )
         else:
             dict_list.append(
                 {"role": "scoreResource", "name": sanitized_prefix + ".pickle"}

src/sasctl/pzmm/write_score_code.py

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,6 @@


 class ScoreCode:
-
     def __init__(self):
         self.score_code = ""

@@ -1068,6 +1067,7 @@ def _determine_returns_type(outputs: List[Any]) -> List[bool]:
             classification values and `False` represents probability or prediction
             values.
         """
+
         def is_str(val):
             if isinstance(val, str) or val == str:
                 return True

src/sasctl/tasks.py

Lines changed: 41 additions & 23 deletions
@@ -187,7 +187,7 @@ def _compare_properties(project_name, model, input_vars=None, output_vars=None):
     )


-def _register_open_source_model(model, name, project, X):
+def _register_open_source_model(model, name, project, X, modeler):
     try:
         info = utils.get_model_info(model, X=X)
     except ValueError as e:
@@ -222,8 +222,9 @@ def _register_open_source_model(model, name, project, X):
         model_name=name,
         model_desc=info.description,
         model_algorithm=info.algorithm,
+        modeler=modeler,
         target_variable=info.target_column,
-        target_values=info.target_values
+        target_values=info.target_values,
     )

     # requirements = JSONFiles().create_requirements_json(model)
@@ -234,20 +235,24 @@ def _register_open_source_model(model, name, project, X):
     pzmm_files.update(metadata)
     pzmm_files.update(properties)

-    model_obj, _ = ImportModel().import_model(model_files=pzmm_files,
-                        model_prefix=name,
-                        project=project,
-                        input_data=info.X,
-                        predict_method=[info.predict_function, info.y.iloc[0].to_list()],
-                        predict_threshold=info.threshold,
-                        score_metrics=info.output_column_names,
-                        target_values=info.target_values,
-                        pickle_type=serialization_format,
-                        model_file_name=list(serialized_model.keys())[0])
+    model_obj, _ = ImportModel().import_model(
+        model_files=pzmm_files,
+        model_prefix=name,
+        project=project,
+        input_data=info.X,
+        predict_method=[info.predict_function, info.y.iloc[0].to_list()],
+        predict_threshold=info.threshold,
+        score_metrics=info.output_column_names,
+        target_values=info.target_values,
+        pickle_type=serialization_format,
+        model_file_name=list(serialized_model.keys())[0],
+    )
     return model_obj


-def _register_sas_model(model, name, project, create_project=False, version=None, X=None, repo_obj=None):
+def _register_sas_model(
+    model, name, project, create_project=False, version=None, X=None, repo_obj=None
+):
     if "DataStepSrc" in model.columns:
         zip_file = utils.create_package_from_datastep(model, input=X)
         if create_project:
@@ -278,9 +283,7 @@ def _register_sas_model(model, name, project, create_project=False, version=None
                 )
             else:
                 model_props = {}
-                project = _create_project(
-                    project, model_props, repo_obj, in_var, out_var
-                )
+                project = _create_project(project, model_props, repo_obj, in_var, out_var)
         model = mr.import_model_from_zip(name, project, zip_file, version=version)
     # Assume ASTORE model if not a DataStep model
     else:
@@ -320,9 +323,7 @@ def _register_sas_model(model, name, project, create_project=False, version=None
         if current_session().version_info() < 4:
             # Upload the model as a ZIP file if using Viya 3.
             zipfile = utils.create_package(model, input=input)
-            model = mr.import_model_from_zip(
-                name, project, zipfile, version=version
-            )
+            model = mr.import_model_from_zip(name, project, zipfile, version=version)
         else:
             # If using Viya 4, just upload the raw AStore and Model Manager will handle inspection.
             astore = cas.astore.download(rstore=model)
@@ -338,6 +339,7 @@ def _register_sas_model(model, name, project, create_project=False, version=None
         )
     return model

+
 def register_model(
     model,
     name,
@@ -348,7 +350,8 @@ def register_model(
     files=None,
     force=False,
     record_packages=True,
-    input=None
+    modeler=None,
+    input=None,
 ):
     """Register a model in the model repository.

@@ -389,6 +392,9 @@ def register_model(
     record_packages : bool, optional
         Capture Python packages registered in the environment. Defaults to
         True. Ignored if `model` is not a Python object.
+    modeler : str, optional
+        The name of the user who created the model. Will default ot the
+        current user if not specified.
     input : DataFrame, type, list of type, or dict of str: type, optional
         Deprecated, use `X` instead.

@@ -474,10 +480,20 @@ def register_model(
            "received '%r'." % (swat.CASTable, model)
        )

-        model_obj = _register_sas_model(model, name, project, repo_obj=repo_obj, X=X, create_project=create_project, version=version)
+        model_obj = _register_sas_model(
+            model,
+            name,
+            project,
+            repo_obj=repo_obj,
+            X=X,
+            create_project=create_project,
+            version=version,
+        )

    elif not isinstance(model, dict):
-        model_obj = _register_open_source_model(model, name, project, X=X)
+        model_obj = _register_open_source_model(
+            model, name, project, X=X, modeler=modeler or current_session().username
+        )
    else:
        project = _create_project(project, model, repo_obj)

@@ -506,7 +522,9 @@ def register_model(
            if isinstance(file, dict):
                for k in file.keys():
                    if k not in ("name", "file", "role"):
-                        raise ValueError(f"Invalid key '{k}' in `file` dictionary. Valid keys are 'name', 'file', and 'role'.")
+                        raise ValueError(
+                            f"Invalid key '{k}' in `file` dictionary. Valid keys are 'name', 'file', and 'role'."
+                        )
                mr.add_model_content(model_obj, **file)
            else:
                mr.add_model_content(model_obj, file)
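
The new `modeler` argument flows from `register_model` into the pzmm model-properties call and falls back to the current session's username when omitted. A hedged usage sketch; the host, credentials, project, and training data below are placeholders:

import pandas as pd
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

from sasctl import Session, register_model

# Toy training data in place of a real data set.
X, y = make_classification(n_samples=100, n_features=4, random_state=0)
X = pd.DataFrame(X, columns=["a", "b", "c", "d"])
model = LogisticRegression().fit(X, y)

with Session("viya.example.com", "user", "password"):  # placeholder connection
    register_model(
        model,
        "Example Logistic Model",
        project="Example Project",
        X=X,
        modeler="jane.doe",  # new in this commit; defaults to the current user if omitted
    )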

src/sasctl/utils/astore.py

Lines changed: 0 additions & 1 deletion
@@ -384,7 +384,6 @@ def regression_target(r):
         properties["targetVariable"] = classification_target(result)

     elif algorithm in ("forest", "gradboost", "tree-based models"):
-
         if algorithm == "forest":
             properties["algorithm"] = "Random forest"
         elif algorithm == "gradboost":
src/sasctl/utils/model_info.py

Lines changed: 15 additions & 8 deletions
@@ -170,21 +170,24 @@ class PyTorchModelInfo(ModelInfo):
     """Stores model information for a PyTorch model instance."""

     def __init__(self, model, X, y=None):
-
         if torch is None:
-            raise RuntimeError("The PyTorch library must be installed to work with PyTorch models. Please `pip install torch`.")
+            raise RuntimeError(
+                "The PyTorch library must be installed to work with PyTorch models. Please `pip install torch`."
+            )

         if not isinstance(model, torch.nn.Module):
             raise ValueError(f"Expected PyTorch model, received {type(model)}.")

         # Some models may take multiple tensors as input. These can be passed as a tuple
         # of tensors. To simplify processing, convert even single inputs into tuples.
         if not isinstance(X, tuple):
-            X = (X, )
+            X = (X,)

         for x in X:
             if not isinstance(x, (np.ndarray, torch.Tensor)):
-                raise ValueError(f"Expected input data to be a numpy array or PyTorch tensor, received {type(X)}.")
+                raise ValueError(
+                    f"Expected input data to be a numpy array or PyTorch tensor, received {type(X)}."
+                )
         # if X.ndim != 2:
         #     raise ValueError(f"Expected input date with shape (n_samples, n_dim), received shape {X.shape}.")

@@ -201,7 +204,9 @@ def __init__(self, model, X, y=None):
             y = model(*X)

         if not isinstance(y, (np.ndarray, torch.Tensor)):
-            raise ValueError(f"Expected output data to be a numpy array or PyTorch tensor, received {type(y)}.")
+            raise ValueError(
+                f"Expected output data to be a numpy array or PyTorch tensor, received {type(y)}."
+            )

         self._model = model

@@ -342,8 +347,6 @@ class SklearnModelInfo(ModelInfo):
     }

     def __init__(self, model, X, y):
-
-
         is_classifier = hasattr(model, "classes_")
         is_binary_classifier = is_classifier and len(model.classes_) == 2
         is_clusterer = hasattr(model, "cluster_centers_")
@@ -360,7 +363,11 @@ def __init__(self, model, X, y):

         # If not a classfier or a clustering algorithm and output is a single column, then
         # assume its a regression algorithm
-        is_regressor = not is_classifier and not is_clusterer and (y_df.shape[1] == 1 or "Regress" in type(model).__name__)
+        is_regressor = (
+            not is_classifier
+            and not is_clusterer
+            and (y_df.shape[1] == 1 or "Regress" in type(model).__name__)
+        )

         if not is_classifier and not is_regressor and not is_clusterer:
             raise ValueError(f"Unexpected model type {model} received.")

tests/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -185,6 +185,7 @@ def add_placeholder(pattern, string, placeholder, group):
     # config.cassette_library_dir = 'tests/cassettes'
     # config.default_cassette_options['serialize_with'] = 'prettyjson'
     config.default_cassette_options["serialize_with"] = "binary"
+    config.default_cassette_options["preserve_exact_body_bytes"] = True
     config.default_cassette_options["record_mode"] = record_mode
     config.default_cassette_options["match_requests_on"] = [
         "method",

tests/integration/cassettes/test_astore_models.test_bayesnet_binary_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_bayesnet_binary_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_bayesnet_binary_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_bayesnet_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_bayesnet_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_bayesnet_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_dtree_regression.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_dtree_regression.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_dtree_regression.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_forest_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_regression.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_forest_regression.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_regression.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_regression_with_nominals.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_forest_regression_with_nominals.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_forest_regression_with_nominals.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_glm.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_glm.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_glm.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_binary_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_gradboost_binary_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_binary_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_gradboost_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_regression.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_gradboost_regression.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_regression.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_regression_with_nominals.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_gradboost_regression_with_nominals.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_gradboost_regression_with_nominals.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_logistic.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_logistic.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_logistic.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_neuralnet_regression.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_neuralnet_regression.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_neuralnet_regression.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_classification.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_svm_classification.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_classification.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_regression.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_svm_regression.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_regression.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_regression_with_nominals.viya_202209.swat.lzma

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/integration/cassettes/test_astore_models.test_svm_regression_with_nominals.viya_202310.swat.lzma

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

tests/integration/cassettes/test_astore_models.test_svm_regression_with_nominals.viya_35.swat.lzma

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.
