Skip to content
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Changed the TFLITE test device to use the DEVICE environment setting, since the GPU delegate fails to load
  • Loading branch information
DvirDukhan committed May 9, 2021
commit de5285e40254426da3b6ea53faae4234a6c553fb
20 changes: 10 additions & 10 deletions tests/flow/tests_tflite.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@ def test_run_tflite_model(env):
model_pb = load_file_content('mnist_model_quant.tflite')
sample_raw = load_file_content('one.raw')

ret = con.execute_command('AI.MODELSTORE', 'm{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
ret = con.execute_command('AI.MODELSTORE', 'm{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

ret = con.execute_command('AI.MODELGET', 'm{1}', 'META')
env.assertEqual(len(ret), 14)
env.assertEqual(ret[5], b'')

ret = con.execute_command('AI.MODELSTORE', 'm{1}', 'TFLITE', 'CPU', 'TAG', 'asdf', 'BLOB', model_pb)
ret = con.execute_command('AI.MODELSTORE', 'm{1}', 'TFLITE', DEVICE, 'TAG', 'asdf', 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

ret = con.execute_command('AI.MODELGET', 'm{1}', 'META')
Expand All @@ -40,7 +40,7 @@ def test_run_tflite_model(env):
# TODO: enable me. CI is having issues on GPU asserts of TFLITE and CPU
if DEVICE == "CPU":
env.assertEqual(ret[1], b'TFLITE')
env.assertEqual(ret[3], b'CPU')
env.assertEqual(ret[3], bytes(DEVICE, "utf-8"))

con.execute_command('AI.MODELEXECUTE', 'm{1}', 'INPUTS', 1, 'a{1}', 'OUTPUTS', 2, 'b{1}', 'c{1}')
values = con.execute_command('AI.TENSORGET', 'b{1}', 'VALUES')
Expand All @@ -58,17 +58,17 @@ def test_run_tflite_model_errors(env):
sample_raw = load_file_content('one.raw')
wrong_model_pb = load_file_content('graph.pb')

ret = con.execute_command('AI.MODELSTORE', 'm_2{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
ret = con.execute_command('AI.MODELSTORE', 'm_2{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

check_error_message(env, con, "Failed to load model from buffer",
'AI.MODELSTORE', 'm{1}', 'TFLITE', 'CPU', 'TAG', 'asdf', 'BLOB', wrong_model_pb)
'AI.MODELSTORE', 'm{1}', 'TFLITE', DEVICE, 'TAG', 'asdf', 'BLOB', wrong_model_pb)

# TODO: Autobatch is tricky with TFLITE because TFLITE expects a fixed batch
# size. At least we should constrain MINBATCHSIZE according to the
# hard-coded dims in the tflite model.
check_error_message(env, con, "Auto-batching not supported by the TFLITE backend",
'AI.MODELSTORE', 'm{1}', 'TFLITE', 'CPU',
'AI.MODELSTORE', 'm{1}', 'TFLITE', DEVICE,
'BATCHSIZE', 2, 'MINBATCHSIZE', 2, 'BLOB', model_pb)

ret = con.execute_command('AI.TENSORSET', 'a{1}', 'FLOAT', 1, 1, 28, 28, 'BLOB', sample_raw)
Expand Down Expand Up @@ -96,7 +96,7 @@ def test_tflite_modelinfo(env):
model_pb = load_file_content('mnist_model_quant.tflite')
sample_raw = load_file_content('one.raw')

ret = con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
ret = con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

ret = con.execute_command('AI.TENSORSET', 'a{1}', 'FLOAT', 1, 1, 28, 28, 'BLOB', sample_raw)
Expand Down Expand Up @@ -143,7 +143,7 @@ def test_tflite_modelrun_disconnect(env):
model_pb = load_file_content('mnist_model_quant.tflite')
sample_raw = load_file_content('one.raw')

ret = red.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
ret = red.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

ret = red.execute_command('AI.TENSORSET', 'a{1}', 'FLOAT', 1, 1, 28, 28, 'BLOB', sample_raw)
Expand All @@ -164,7 +164,7 @@ def test_tflite_model_rdb_save_load(env):
con = env.getConnection()
model_pb = load_file_content('mnist_model_quant.tflite')

ret = con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
ret = con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)
env.assertEqual(ret, b'OK')

model_serialized_memory = con.execute_command('AI.MODELGET', 'mnist{1}', 'BLOB')
Expand Down Expand Up @@ -196,7 +196,7 @@ def test_tflite_info(env):

model_pb = load_file_content('mnist_model_quant.tflite')

con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', 'CPU', 'BLOB', model_pb)
con.execute_command('AI.MODELSTORE', 'mnist{1}', 'TFLITE', DEVICE, 'BLOB', model_pb)

ret = con.execute_command('AI.INFO')
env.assertEqual(8, len(ret))
Expand Down