# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numbers
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.op_mapper import caffe_shape
from x2paddle.op_mapper.caffe_custom_layer import *
class CaffeOpMapper(OpMapper):
    directly_map_ops = {
        'ReLU': 'relu',
        'AbsVal': 'abs',
        'Sigmoid': 'sigmoid',
        'TanH': 'tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.weights = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}

        print("Total nodes: {}".format(len(self.graph.topo_sort)))
        for node_name in self.graph.topo_sort:
            # The loop body was cut off in this snippet; fetching each
            # node for later dispatch is an assumed minimal reconstruction.
            node = self.graph.get_node(node_name)
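
# A minimal sketch (assumed, not the original x2paddle code) of how the
# Caffe directly_map_ops table above can be consumed: each supported
# Caffe layer type translates one-to-one into a Paddle fluid layer name.
def caffe_directly_map_sketch(node):
    assert node.layer_type in CaffeOpMapper.directly_map_ops
    fluid_op = CaffeOpMapper.directly_map_ops[node.layer_type]
    # e.g. a Caffe 'TanH' layer becomes a call to fluid.layers.tanh
    return fluid_op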
import numpy
import sys
import math  # required by math.ceil below; missing from the snippet


# compute padding size for SAME mode
def get_same_padding(in_size, kernel_size, stride):
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    if pad_size < 0:
        pad_size = 0
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]
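
# Quick sanity checks of the SAME-padding arithmetic above (values
# computed by hand, not taken from the original sources): for a
# 224-wide input with kernel 7 and stride 2, TensorFlow's SAME mode
# needs 5 pixels of padding in total, split as 2 before and 3 after.
assert get_same_padding(224, 7, 2) == [2, 3]
assert get_same_padding(28, 3, 1) == [1, 1]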
class TFOpMapperNHWC(OpMapper):
    directly_map_ops = {
        'Relu': ['relu'],
        'Relu6': ['relu6'],
        'Shape': ['shape'],
        'Abs': ['abs'],
        'Sigmoid': ['sigmoid'],
        'Exp': ['exp'],
        'Rsqrt': ['rsqrt'],
        'swish_f32': ['swish'],
        'Tanh': ['tanh'],
        'LeakyRelu': ['leaky_relu', {
            'alpha': 'alpha'
        }]
    }
    elementwise_ops = {
        'Add': 'elementwise_add',
        # the remaining entries of this table were cut off in the snippet
    }
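
# A minimal sketch (not the original x2paddle implementation) of how the
# list-form directly_map_ops entries above could be consumed: the first
# element names the Paddle op, and an optional dict renames TF attributes
# to Paddle parameters (e.g. LeakyRelu's 'alpha'). The node.get_attr
# accessor is assumed here for illustration.
def directly_map_sketch(node):
    op_info = TFOpMapperNHWC.directly_map_ops[node.layer_type]
    paddle_op = op_info[0]
    params = {}
    if len(op_info) > 1:
        for tf_attr, pd_param in op_info[1].items():
            params[pd_param] = node.get_attr(tf_attr)  # assumed accessor
    return paddle_op, params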
from x2paddle.decoder.onnx_decoder import ONNXGraphDataNode


def get_node_value(node):
    # The enclosing signature was lost in this snippet; the function name
    # is assumed. It returns a node's constant payload if it has one.
    if 'Constant' in node.layer_name:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    return None
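
# A small, assumed illustration of the helper above: ONNX 'Constant'
# nodes carry their payload on .value, graph initializers on .weight,
# and anything else yields None. The stub only imitates the attributes
# the helper touches.
class _FakeConstNode:
    layer_name = 'Constant_0'
    value = 3.14

assert get_node_value(_FakeConstNode()) == 3.14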
def get_same_padding(in_size, kernel_size, stride):
    # Same formula as the TF variant above, but note: this variant does
    # not clamp negative pad sizes to zero.
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]
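
# Hand-computed example (not from the original sources, and assuming
# Python 3 division semantics) of why the missing clamp can matter:
# with in_size=5, kernel_size=1, stride=3, pad_size comes out negative
# and this variant returns [0, -1], whereas the clamped TF variant
# above returns [0, 0].
assert get_same_padding(5, 1, 3) == [0, -1]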
class ONNXOpMapper(OpMapper):
    elementwise_ops = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }

    def __init__(self, decoder, save_dir):
        super(ONNXOpMapper, self).__init__()
        self.decoder = decoder
        self.graph = decoder.onnx_graph
        self.input_shapes = []
        self.weights = dict()
        self.omit_nodes = list()
        self.used_custom_layers = dict()
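
# A minimal sketch (assumed, not the original implementation) of how the
# elementwise_ops table above could drive code generation: every ONNX op
# in the table maps onto one Paddle fluid elementwise layer of the same
# arity, so a single generic handler suffices instead of one method per
# op. The emitted string form is illustrative only.
def elementwise_map_sketch(onnx_op_type, x_name, y_name, out_name):
    fluid_op = ONNXOpMapper.elementwise_ops[onnx_op_type]
    return "{} = fluid.layers.{}(x={}, y={})".format(
        out_name, fluid_op, x_name, y_name)

# e.g. elementwise_map_sketch('Add', 'a', 'b', 'c') yields
# "c = fluid.layers.elementwise_add(x=a, y=b)"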
def nhwc_dim_to_nchw(node, dim):
    # This fragment lost its opening lines; the signature, the isinstance
    # guard (implied by the dangling 'else:'), and the format-string
    # lookups are reconstructed from context.
    tf_data_format = list(node.tf_data_format)
    pd_data_format = list(node.pd_data_format)
    if isinstance(dim, list):
        for i in range(len(dim)):
            char = tf_data_format[dim[i]]
            dim[i] = pd_data_format.index(char)
    else:
        char = tf_data_format[dim]
        dim = pd_data_format.index(char)
    return dim


def nhwc_axis_to_nchw(dim):
    # Second fragment, also missing its header; the name is assumed. Maps
    # a (possibly negative) NHWC axis index onto the corresponding NCHW
    # axis of a 4-D tensor: 0->0, 1->2, 2->3, 3->1.
    if dim < 0:
        dim += 4
    if dim > 0:
        dim = (dim + 1) % 4 + int((dim + 1) / 4)
    return dim
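
# Hand-checked examples for the axis remapping above (values computed
# from the formula, not taken from the original sources): NHWC order is
# (N, H, W, C) and NCHW is (N, C, H, W), so the channel axis 3 becomes
# axis 1 while the batch axis stays at 0.
assert nhwc_axis_to_nchw(3) == 1
assert nhwc_axis_to_nchw(0) == 0
assert nhwc_axis_to_nchw(-1) == 1  # negative indices are normalized first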
class TFOpMapper(OpMapper):
    directly_map_ops = {
        'Relu': ['relu'],
        'Relu6': ['relu6'],
        'Shape': ['shape'],
        'Abs': ['abs'],
        'Sigmoid': ['sigmoid'],
        'Exp': ['exp'],
        'Rsqrt': ['rsqrt'],
        'swish_f32': ['swish'],
        'Tanh': ['tanh'],
        'LeakyRelu': ['leaky_relu', {
            'alpha': 'alpha'
        }]
    }
    elementwise_ops = {
        'Add': 'elementwise_add',
        # the remaining entries of this table were cut off in the snippet
    }
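
# A minimal, assumed sketch of the dispatch order such a mapper typically
# follows over the graph's topological sort: dedicated handler methods
# first, then the directly_map_ops table, then the elementwise_ops table.
def dispatch_sketch(mapper, node):
    op = node.layer_type
    if hasattr(mapper, op):
        getattr(mapper, op)(node)       # dedicated handler method
    elif op in mapper.directly_map_ops:
        pass                            # one-to-one op translation
    elif op in mapper.elementwise_ops:
        pass                            # generic binary elementwise path
    else:
        raise Exception("op {} is not supported".format(op))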