Skip to content

Commit e91fce5

Browse files
committed
PR fixes, among them: bring back the documentation for deprecated modelset and modelrun. Validate that all required arguments appear in the new commands (+test it)
1 parent 5448fc3 commit e91fce5

File tree

6 files changed

+87
-28
lines changed

6 files changed

+87
-28
lines changed

redisai/client.py

Lines changed: 50 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -212,7 +212,7 @@ def modelstore(
212212
res = self.execute_command(*args)
213213
return res if not self.enable_postprocess else processor.modelstore(res)
214214

215-
@deprecated(version="1.2.2", reason="Use modelstore instead")
215+
@deprecated(version="1.2.0", reason="Use modelstore instead")
216216
def modelset(
217217
self,
218218
key: AnyStr,
@@ -226,8 +226,34 @@ def modelset(
226226
outputs: Union[AnyStr, List[AnyStr]] = None,
227227
) -> str:
228228
"""
229-
Similar to modelstore (this is the deprecated version that will not be
230-
supported in future versions).
229+
Set the model on provided key.
230+
231+
Parameters
232+
----------
233+
key : AnyStr
234+
Key name
235+
backend : str
236+
Backend name. Allowed backends are TF, TORCH, TFLITE, ONNX
237+
device : str
238+
Device name. Allowed devices are CPU and GPU. If multiple GPUs are available,
239+
it can be specified using the format GPU:<gpu number>. For example: GPU:0
240+
data : bytes
241+
Model graph read as bytes string
242+
batch : int
243+
Number of batches for doing auto-batching
244+
minbatch : int
245+
Minimum number of samples required in a batch for model execution
246+
tag : AnyStr
247+
Any string that will be saved in RedisAI as tag for the model
248+
inputs : Union[AnyStr, List[AnyStr]]
249+
Input node(s) in the graph. Required only for Tensorflow graphs
250+
outputs : Union[AnyStr, List[AnyStr]]
251+
Output node(s) in the graph. Required only for Tensorflow graphs
252+
253+
Returns
254+
-------
255+
str
256+
'OK' if success, raise an exception otherwise
231257
232258
Example
233259
-------
@@ -347,16 +373,34 @@ def modelexecute(
347373
res = self.execute_command(*args)
348374
return res if not self.enable_postprocess else processor.modelexecute(res)
349375

350-
@deprecated(version="1.2.2", reason="Use modelexecute instead")
376+
@deprecated(version="1.2.0", reason="Use modelexecute instead")
351377
def modelrun(
352378
self,
353379
key: AnyStr,
354380
inputs: Union[AnyStr, List[AnyStr]],
355381
outputs: Union[AnyStr, List[AnyStr]],
356382
) -> str:
357383
"""
358-
Similar to modelexecute (this is the deprecated version that will not be
359-
supported in future versions).
384+
Run the model using input(s) which are already in the scope and are associated
385+
to some keys. Modelrun also needs the output key name(s) to store the output
386+
from the model. The number of outputs from the model and the number of keys
387+
provided here must be same. Otherwise, RedisAI throws an error
388+
389+
Parameters
390+
----------
391+
key : str
392+
Model key to run
393+
inputs : Union[AnyStr, List[AnyStr]]
394+
Tensor(s) which is already saved in the RedisAI using a tensorset call. These
395+
tensors will be used as the input for the modelrun
396+
outputs : Union[AnyStr, List[AnyStr]]
397+
keys on which the outputs to be saved. If those keys exist already, modelrun
398+
will overwrite them with new values
399+
400+
Returns
401+
-------
402+
str
403+
'OK' if success, raise an exception otherwise
360404
361405
Example
362406
-------

redisai/command_builder.py

Lines changed: 12 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@ def modelstore(
2323
inputs: Union[AnyStr, List[AnyStr]],
2424
outputs: Union[AnyStr, List[AnyStr]],
2525
) -> Sequence:
26+
if name is None:
27+
raise ValueError("Model name was not given")
2628
if device.upper() not in utils.allowed_devices:
27-
raise ValueError(
28-
f"Device not allowed. Use any from {utils.allowed_devices}")
29+
raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
2930
if backend.upper() not in utils.allowed_backends:
30-
raise ValueError(
31-
f"Backend not allowed. Use any from {utils.allowed_backends}")
31+
raise ValueError(f"Backend not allowed. Use any from {utils.allowed_backends}")
3232
args = ["AI.MODELSTORE", name, backend, device]
3333

3434
if tag is not None:
@@ -64,8 +64,7 @@ def modelstore(
6464
"Inputs and outputs keywords should not be specified for this backend"
6565
)
6666
chunk_size = 500 * 1024 * 1024 # TODO: this should be configurable.
67-
data_chunks = [data[i: i + chunk_size]
68-
for i in range(0, len(data), chunk_size)]
67+
data_chunks = [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
6968
# TODO: need a test case for this
7069
args += ["BLOB", *data_chunks]
7170
return args
@@ -83,11 +82,9 @@ def modelset(
8382
outputs: Union[AnyStr, List[AnyStr]],
8483
) -> Sequence:
8584
if device.upper() not in utils.allowed_devices:
86-
raise ValueError(
87-
f"Device not allowed. Use any from {utils.allowed_devices}")
85+
raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
8886
if backend.upper() not in utils.allowed_backends:
89-
raise ValueError(
90-
f"Backend not allowed. Use any from {utils.allowed_backends}")
87+
raise ValueError(f"Backend not allowed. Use any from {utils.allowed_backends}")
9188
args = ["AI.MODELSET", name, backend, device]
9289

9390
if tag is not None:
@@ -101,13 +98,11 @@ def modelset(
10198

10299
if backend.upper() == "TF":
103100
if not (all((inputs, outputs))):
104-
raise ValueError(
105-
"Require keyword arguments input and output for TF models")
101+
raise ValueError("Require keyword arguments input and output for TF models")
106102
args += ["INPUTS", *utils.listify(inputs)]
107103
args += ["OUTPUTS", *utils.listify(outputs)]
108104
chunk_size = 500 * 1024 * 1024
109-
data_chunks = [data[i: i + chunk_size]
110-
for i in range(0, len(data), chunk_size)]
105+
data_chunks = [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
111106
# TODO: need a test case for this
112107
args += ["BLOB", *data_chunks]
113108
return args
@@ -130,6 +125,8 @@ def modelexecute(
130125
outputs: Union[AnyStr, List[AnyStr]],
131126
timeout: int,
132127
) -> Sequence:
128+
if name is None or inputs is None or outputs is None:
129+
raise ValueError("Missing required arguments for model execute command")
133130
args = [
134131
"AI.MODELEXECUTE",
135132
name,
@@ -209,8 +206,7 @@ def tensorget(key: AnyStr, as_numpy: bool = True, meta_only: bool = False) -> Se
209206

210207
def scriptset(name: AnyStr, device: str, script: str, tag: AnyStr = None) -> Sequence:
211208
if device.upper() not in utils.allowed_devices:
212-
raise ValueError(
213-
f"Device not allowed. Use any from {utils.allowed_devices}")
209+
raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
214210
args = ["AI.SCRIPTSET", name, device]
215211
if tag:
216212
args += ["TAG", tag]

redisai/postprocessor.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,7 @@ def tensorget(res, as_numpy, as_numpy_mutable, meta_only):
4242
mutable=False,
4343
)
4444
else:
45-
target = float if rai_result["dtype"] in (
46-
"FLOAT", "DOUBLE") else int
45+
target = float if rai_result["dtype"] in ("FLOAT", "DOUBLE") else int
4746
utils.recursive_bytetransform(rai_result["values"], target)
4847
return rai_result
4948

redisai/utils.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,7 @@ def numpy2blob(tensor: np.ndarray) -> tuple:
2626
try:
2727
dtype = dtype_dict[str(tensor.dtype)]
2828
except KeyError:
29-
raise TypeError(
30-
f"RedisAI doesn't support tensors of type {tensor.dtype}")
29+
raise TypeError(f"RedisAI doesn't support tensors of type {tensor.dtype}")
3130
shape = tensor.shape
3231
blob = bytes(tensor.data)
3332
return dtype, shape, blob

setup.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,15 +6,15 @@
66

77
setup(
88
name="redisai",
9-
version="1.0.2",
9+
version="1.2.0",
1010
description="RedisAI Python Client",
1111
long_description=long_description,
1212
long_description_content_type="text/x-rst",
1313
url="http://github.com/RedisAI/redisai-py",
1414
author="RedisLabs",
1515
author_email="[email protected]",
1616
packages=find_packages(),
17-
install_requires=["redis", "hiredis", "numpy"],
17+
install_requires=["redis", "hiredis", "numpy", "deprecated"],
1818
python_requires=">=3.6",
1919
classifiers=[
2020
"Development Status :: 4 - Beta",

test/test.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,18 @@ def test_modelstore_errors(self):
162162
model_path = os.path.join(MODEL_DIR, "graph.pb")
163163
model_pb = load_model(model_path)
164164
con = self.get_client()
165+
166+
with self.assertRaises(ValueError) as e:
167+
con.modelstore(
168+
None,
169+
"TF",
170+
"CPU",
171+
model_pb,
172+
inputs=["a", "b"],
173+
outputs=["mul"]
174+
)
175+
self.assertEqual(str(e.exception), "Model name was not given")
176+
165177
with self.assertRaises(ValueError) as e:
166178
con.modelstore(
167179
"m",
@@ -304,6 +316,15 @@ def test_run_tf_model(self):
304316
"m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs="mul", tag="v1.0"
305317
)
306318

319+
# Required arguments are None
320+
with self.assertRaises(ValueError) as e:
321+
con.modelexecute(
322+
"m",
323+
inputs=None,
324+
outputs=None
325+
)
326+
self.assertEqual(str(e.exception), "Missing required arguments for model execute command")
327+
307328
# wrong model
308329
with self.assertRaises(ResponseError) as e:
309330
con.modelstore(

0 commit comments

Comments
 (0)