# Spec modules (assumed here to be the CTranslate2 ``ctranslate2.specs`` package).
from ctranslate2.specs import attention_spec
from ctranslate2.specs import common_spec
from ctranslate2.specs import model_spec

class TransformerEncoderLayerSpec(model_spec.LayerSpec):
    def __init__(self):
        # Encoder layer: self-attention followed by a feed-forward block.
        self.self_attention = attention_spec.MultiHeadAttentionSpec(self_attention=True)
        self.ffn = FeedForwardSpec()

class TransformerDecoderLayerSpec(model_spec.LayerSpec):
    def __init__(self):
        # Decoder layer: self-attention, attention over the encoder output, feed-forward.
        self.self_attention = attention_spec.MultiHeadAttentionSpec(self_attention=True)
        self.attention = attention_spec.MultiHeadAttentionSpec()
        self.ffn = FeedForwardSpec()

class FeedForwardSpec(model_spec.LayerSpec):
    def __init__(self):
        # Position-wise feed-forward block with its own layer normalization.
        self.layer_norm = common_spec.LayerNormSpec()
        self.linear_0 = common_spec.LinearSpec()
        self.linear_1 = common_spec.LinearSpec()

class PositionEncoderSpec(model_spec.LayerSpec):
    def __init__(self):
        # The position encodings are declared optional.
        self.encodings = model_spec.OPTIONAL
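
These spec classes only declare how the model's weights nest; a converter is expected to fill the attributes in later. As a rough illustration of that nesting, here is a minimal sketch (assuming the classes above are importable; the helper is hypothetical and not part of the library API) that walks a spec and prints slash-joined paths for its attributes:

def iter_spec_names(spec, prefix=""):
    """Recursively yield a slash-joined path for each attribute of a spec.

    Illustrative only: any value with a non-empty ``__dict__`` is treated
    as a nested spec and recursed into; everything else is a leaf.
    """
    for name, value in vars(spec).items():
        path = prefix + name
        if hasattr(value, "__dict__") and vars(value):
            yield from iter_spec_names(value, path + "/")
        else:
            yield path

layer = TransformerDecoderLayerSpec()
for name in iter_spec_names(layer):
    print(name)
# Prints one path per attribute reachable from the layer, e.g. entries under
# "self_attention/", "attention/", and "ffn/", mirroring the nesting above.

This simplified walk treats lists and placeholder values as leaves, so the output is only an approximation of how the real spec traversal flattens weight names.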