Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def load_ir_model(model_xml, device, plugin_dir, cpu_extension):
    """Initialize an Inference Engine plugin for *device* and read the IR network.

    NOTE(review): this fragment appears truncated — it validates CPU layer
    support but never loads or returns the network; compare the complete
    two-argument load_ir_model variant later in this file.
    """
    # Weights file sits next to the .xml with the same stem and a .bin extension.
    model_bin = os.path.splitext(model_xml)[0] + ".bin"
    # initialize plugin
    log.info("Initializing plugin for %s device...", device)
    plugin = IEPlugin(device=device, plugin_dirs=plugin_dir)
    if cpu_extension and 'CPU' in device:
        plugin.add_cpu_extension(cpu_extension)
    # read IR
    log.info("Reading IR...")
    net = IENetwork(model=model_xml, weights=model_bin)
    if "CPU" in device:
        # Abort if any network layer has no CPU-plugin implementation.
        supported_layers = plugin.get_supported_layers(net)
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if not_supported_layers:
            log.error("Following layers are not supported by the plugin for specified device %s:\n %s",
                      device, ', '.join(not_supported_layers))
            log.error("Please try to specify cpu extensions library path in sample's command line parameters using "
                      "--cpu_extension command line argument")
            sys.exit(1)
def main():
    """Demo entry point (fragment).

    NOTE(review): truncated — the fragment ends immediately after the
    single-input assertion; network loading and inference are not visible here.
    """
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    model_xml = args.model
    # Weights file sits next to the .xml with the same stem and a .bin extension.
    model_bin = os.path.splitext(model_xml)[0] + ".bin"
    # Plugin initialization for specified device and load extensions library if specified
    log.info("Initializing plugin for {} device...".format(args.device))
    plugin = IEPlugin(device=args.device, plugin_dirs=args.plugin_dir)
    if args.cpu_extension and 'CPU' in args.device:
        plugin.add_cpu_extension(args.cpu_extension)
    # Read IR
    log.info("Reading IR...")
    net = IENetwork(model=model_xml, weights=model_bin)
    if plugin.device == "CPU":
        # Fail fast when the CPU plugin lacks an implementation for any layer.
        supported_layers = plugin.get_supported_layers(net)
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if len(not_supported_layers) != 0:
            log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                      format(plugin.device, ', '.join(not_supported_layers)))
            log.error("Please try to specify cpu extensions library path in demo's command line parameters using -l "
                      "or --cpu_extension command line argument")
            sys.exit(1)
    assert len(net.inputs.keys()) == 1, "Demo supports only single input topologies"
def main():
    """HETERO-plugin demo entry point (fragment).

    NOTE(review): truncated — the fragment ends mid-statement at the dangling
    `if not any(...)` below; that branch's body is not visible in this file.
    """
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    # NOTE(review): assert is stripped under `python -O`; a CLI argument check
    # should raise/exit instead — left unchanged in this doc-only pass.
    assert args.device.split(':')[0] == "HETERO", "This demo supports only Hetero Plugin. " \
                                                  "Please specify correct device, e.g. HETERO:FPGA,CPU"
    model_xml = args.model
    model_bin = os.path.splitext(model_xml)[0] + ".bin"
    # Plugin initialization for specified device and load extensions library if specified
    plugin = IEPlugin(device=args.device, plugin_dirs=args.plugin_dir)
    if args.cpu_extension and 'CPU' in args.device:
        plugin.add_cpu_extension(args.cpu_extension)
    # Read IR
    net = IENetwork(model=model_xml, weights=model_bin)
    if plugin.device == "CPU":
        # Fail fast when the CPU plugin lacks an implementation for any layer.
        supported_layers = plugin.get_supported_layers(net)
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if len(not_supported_layers) != 0:
            log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                      format(plugin.device, ', '.join(not_supported_layers)))
            log.error("Please try to specify cpu extensions library path in demo's command line parameters using -l "
                      "or --cpu_extension command line argument")
            sys.exit(1)
    # Collect the set of distinct layer types present in the network.
    net_ops = set([l.type for l in net.layers.values()])
    # Truncated here: the handling for networks without Convolution layers is missing.
    if not any([op == "Convolution" for op in net_ops]):
def main():
    """Dynamic-batch demo entry point (fragment).

    NOTE(review): truncated — ends mid-statement at the dangling
    unsupported-layers check below; its body is not visible in this file.
    """
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    model_xml = args.model
    # Weights file sits next to the .xml with the same stem and a .bin extension.
    model_bin = os.path.splitext(model_xml)[0] + ".bin"
    # Plugin initialization for specified device and load extensions library if specified
    plugin = IEPlugin(device=args.device, plugin_dirs=args.plugin_dir)
    # Configure plugin to support dynamic batch size
    plugin.set_config({"DYN_BATCH_ENABLED": "YES"})
    # Load cpu_extensions library if specified
    if args.cpu_extension and 'CPU' in args.device:
        plugin.add_cpu_extension(args.cpu_extension)
    # Read IR
    log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
    net = IENetwork(model=model_xml, weights=model_bin)
    # Check for unsupported layers if the device is 'CPU'
    if plugin.device == "CPU":
        unsupported_layers = [layer for layer in net.layers if layer not in plugin.get_supported_layers(net)]
        # Truncated here: the error-reporting body of this check is missing.
        if len(unsupported_layers) != 0:
def build(cls, model_name, model_version, model_xml, model_bin,
          mapping_config, batch_size_param, shape_param, num_ireq,
          target_device, plugin_config):
    """Build a model-serving engine instance (fragment).

    First parameter is `cls`, so this is presumably a @classmethod whose
    decorator sits outside this view — TODO confirm.
    NOTE(review): truncated — ends at the "Initial shape setup" banner;
    `mapping_config`, `num_ireq`, `plugin_config` and `shape_info` are not
    used within the visible portion.
    """
    plugin = IEPlugin(device=target_device,
                      plugin_dirs=GLOBAL_CONFIG['plugin_dir'])
    if GLOBAL_CONFIG['cpu_extension'] and 'CPU' in target_device:
        plugin.add_cpu_extension(GLOBAL_CONFIG['cpu_extension'])
    net = IENetwork(model=model_xml, weights=model_bin)
    batching_info = BatchingInfo(batch_size_param)
    shape_info = ShapeInfo(shape_param, net.inputs)
    if batching_info.mode == BatchingMode.FIXED:
        # Caller pinned an explicit batch size — apply it to the network.
        net.batch_size = batching_info.batch_size
    else:
        # Otherwise adopt the batch size baked into the IR.
        batching_info.batch_size = net.batch_size
    effective_batch_size = batching_info.get_effective_batch_size()
    logger.debug("[Model: {}, version: {}] --- effective batch size - {}"
                 .format(model_name, model_version, effective_batch_size))
    ###############################
    # Initial shape setup
def load_ir_model(model_xml, device):
    """Load an IR model (.xml + sibling .bin) onto *device*.

    Returns a tuple (exec_net, plugin, inputs, out_blob) where `inputs` is a
    list of (blob_name, shape) pairs and `out_blob` is the name of the first
    output blob.
    """
    # Weights file sits next to the .xml with the same stem and a .bin extension.
    weights_path = os.path.splitext(model_xml)[0] + ".bin"
    # Initialize the plugin and read the IR.
    ie_plugin = IEPlugin(device=device)
    network = IENetwork(model=model_xml, weights=weights_path)
    executable = ie_plugin.load(network=network)
    # Record each input blob's name alongside its shape.
    input_shapes = []
    for blob_name in network.inputs.keys():
        input_shapes.append((blob_name, network.inputs[blob_name].shape))
    output_blob = next(iter(network.outputs))
    # Release the parsed network; the executable network is self-contained.
    del network
    return executable, ie_plugin, input_shapes, output_blob
def inference(args, model_xml, model_bin, inputs, outputs):
    """Set up an IE plugin and network for inference (fragment).

    NOTE(review): truncated — only plugin setup and the CPU layer-support
    check are visible; the actual inference (and any use of the `inputs` /
    `outputs` parameters) is not. Also, unlike sibling fragments in this
    file, the unsupported-layer branch logs errors but does not exit —
    TODO confirm intended flow.
    """
    # Function-local imports keep openvino optional for the rest of the module.
    from openvino.inference_engine import IENetwork
    from openvino.inference_engine import IEPlugin
    plugin = IEPlugin(device=args.device, plugin_dirs=args.plugin_dir)
    if args.cpu_extension and 'CPU' in args.device:
        plugin.add_cpu_extension(args.cpu_extension)
    log.info('Loading network files:\n\t{}\n\t{}'.format(model_xml, model_bin))
    net = IENetwork(model=model_xml, weights=model_bin)
    if plugin.device == 'CPU':
        # Report any layer the CPU plugin cannot run.
        supported_layers = plugin.get_supported_layers(net)
        not_supported_layers = [l for l in net.layers.keys() if
                                l not in supported_layers]
        if not_supported_layers:
            # NOTE(review): "Folowing" typo is in a runtime string; not changed
            # in this doc-only pass.
            log.error('Folowing layers are not supported by the plugin for '
                      'specified device {}:\n {}'.format(
                          plugin.device, ', '.join(not_supported_layers)))
            log.error('Please try to specify cpu extensions library path in '
                      'sample\'s command line parameters using '
                      '--cpu-extension command line argument')
def main():
    """ROI-detection demo entry point (fragment).

    NOTE(review): this fragment looks like a bad splice of two different
    snippets — after the error logging, the `return {...}` / `else` /
    `return {...}` lines reference names (`dev`, `model`) that are undefined
    in this function and clearly belong to some other handler. The original
    demo presumably had `sys.exit(1)` here — TODO confirm against upstream.
    """
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    model_xml = args.model
    model_bin = os.path.splitext(model_xml)[0] + ".bin"
    # Per-stage timing accumulators.
    preprocess_times = collections.deque()
    infer_times = collections.deque()
    postprocess_times = collections.deque()
    ROIfile=open("ROIs.txt","w"); # output stored here, view with ROIviewer
    # Plugin initialization for specified device and load extensions library if specified
    log.info("Initializing plugin for {} device...".format(args.device))
    plugin = IEPlugin(device=args.device, plugin_dirs=args.plugin_dir)
    if args.cpu_extension and 'CPU' in args.device:
        plugin.add_cpu_extension(args.cpu_extension)
    # Read IR
    log.info("Reading IR...")
    net = IENetwork(model=model_xml, weights=model_bin)
    if plugin.device == "CPU":
        supported_layers = plugin.get_supported_layers(net)
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if len(not_supported_layers) != 0:
            log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                      format(plugin.device, ', '.join(not_supported_layers)))
            log.error("Please try to specify cpu extensions library path in demo's command line parameters using -l "
                      "or --cpu_extension command line argument")
            # NOTE(review): splice point — `dev` and `model` are undefined here.
            return {'result': '', 'info_state': str(dev) + ' not a valid device', 'status': 'incomplete'}
        else:
            return {'result': '', 'info_state': str(model) + 'not a valid model', 'status': 'incomplete'}
# Object Detection via OpenVINO
# NOTE(review): free-floating fragment — `path_to_xml`, `path_to_bin`,
# `obj_frame`, `dev` and `extension` are not defined in this file; this is
# presumably the interior of a detection function — TODO confirm.
net = IENetwork(model=path_to_xml, weights=path_to_bin)
input_layer = next(iter(net.inputs))
output_layer = next(iter(net.outputs))
n, c, h, w = net.inputs[input_layer].shape
# Pre-processing data
obj_in_frame = cv2.resize(obj_frame, (w, h))
# Move channels first to match the network's (n, c, h, w) input layout.
obj_in_frame = obj_in_frame.transpose((2, 0, 1))
obj_in_frame = obj_in_frame.reshape((n, c, h, w))
obj_plugin = IEPlugin(device=dev.upper())
if dev.lower() == 'cpu':
    obj_plugin.add_cpu_extension(extension)
# Loading into network plugin
obj_exec_net = obj_plugin.load(network=net, num_requests=1)
del net
obj_res = obj_exec_net.infer({'image_tensor': obj_in_frame})
obj_det = obj_res[output_layer]
initial_w = obj_frame.shape[1]
initial_h = obj_frame.shape[0]
preds = []
for obj in obj_det[0][0]:
    # Draw only objects when probability more than specified threshold
    if obj[2] > 0.5:
        xmin = int(obj[3] * initial_w)
# NOTE(review): fragment is truncated mid-loop here.
def greengrass_classification_sample_run():
    """AWS Greengrass classification sample loop (fragment).

    NOTE(review): truncated — the definition continues past the end of this
    view, so only the setup portion is documented here.
    """
    client.publish(topic=PARAM_TOPIC_NAME, payload="OpenVINO: Initializing...")
    # Weights file sits next to the .xml with the same stem and a .bin extension.
    model_bin = os.path.splitext(PARAM_MODEL_XML)[0] + ".bin"
    # Plugin initialization for specified device and load extensions library if specified
    plugin = IEPlugin(device=PARAM_DEVICE, plugin_dirs="")
    if "CPU" in PARAM_DEVICE:
        plugin.add_cpu_extension(PARAM_CPU_EXTENSION_PATH)
    # Read IR
    net = IENetwork(model=PARAM_MODEL_XML, weights=model_bin)
    assert len(net.inputs.keys()) == 1, "Sample supports only single input topologies"
    assert len(net.outputs) == 1, "Sample supports only single output topologies"
    input_blob = next(iter(net.inputs))
    out_blob = next(iter(net.outputs))
    # Read and pre-process input image
    # NOTE(review): likely a bug — sibling code in this file unpacks
    # `net.inputs[blob].shape`; unpacking the input info object itself into
    # n, c, h, w is suspect. Left unchanged in this doc-only pass.
    n, c, h, w = net.inputs[input_blob]
    cap = cv2.VideoCapture(PARAM_INPUT_SOURCE)
    exec_net = plugin.load(network=net)
    del net
    client.publish(topic=PARAM_TOPIC_NAME, payload="Starting inference on %s" % PARAM_INPUT_SOURCE)
    start_time = timeit.default_timer()
    inf_seconds = 0.0