From da64000fa56cc85b0859bc17cb16a3d753b8304a Mon Sep 17 00:00:00 2001
From: Atila Orhon
Date: Mon, 8 Aug 2022 21:03:14 -0700
Subject: [PATCH] fix relative import

---
 ane_transformers/_version.py                      | 2 +-
 ane_transformers/reference/decoder.py             | 2 +-
 ane_transformers/reference/ffn.py                 | 2 +-
 ane_transformers/reference/multihead_attention.py | 2 +-
 ane_transformers/reference/transformer.py         | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/ane_transformers/_version.py b/ane_transformers/_version.py
index a73e18e..12d06bf 100644
--- a/ane_transformers/_version.py
+++ b/ane_transformers/_version.py
@@ -1 +1 @@
-__version__ = "0.1.2"
+__version__ = "0.1.3"
diff --git a/ane_transformers/reference/decoder.py b/ane_transformers/reference/decoder.py
index b2a9189..aea4595 100644
--- a/ane_transformers/reference/decoder.py
+++ b/ane_transformers/reference/decoder.py
@@ -12,7 +12,7 @@
 import torch
 import torch.nn as nn

-from layer_norm import LayerNormANE
+from .layer_norm import LayerNormANE


 class TransformerDecoder(nn.Module):
diff --git a/ane_transformers/reference/ffn.py b/ane_transformers/reference/ffn.py
index 7e3a7d3..7971439 100644
--- a/ane_transformers/reference/ffn.py
+++ b/ane_transformers/reference/ffn.py
@@ -6,7 +6,7 @@
 import torch
 import torch.nn as nn

-from layer_norm import LayerNormANE
+from .layer_norm import LayerNormANE


 class FFN(nn.Module):
diff --git a/ane_transformers/reference/multihead_attention.py b/ane_transformers/reference/multihead_attention.py
index f3b4e51..6498942 100644
--- a/ane_transformers/reference/multihead_attention.py
+++ b/ane_transformers/reference/multihead_attention.py
@@ -6,7 +6,7 @@
 import torch
 import torch.nn as nn

-from layer_norm import LayerNormANE
+from .layer_norm import LayerNormANE


 class MultiHeadAttention(nn.Module):
diff --git a/ane_transformers/reference/transformer.py b/ane_transformers/reference/transformer.py
index f363ca6..d70da74 100644
--- a/ane_transformers/reference/transformer.py
+++ b/ane_transformers/reference/transformer.py
@@ -12,7 +12,7 @@
 import torch
 import torch.nn as nn

-import encoder, decoder, multihead_attention, ffn
+from . import encoder, decoder, multihead_attention, ffn


 class AppleNeuralEngineTransformer(nn.Module):
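
Note on the fix: the reference modules live inside the ane_transformers.reference package, so a bare "from layer_norm import ..." only resolves when Python runs with that directory on sys.path; once the package is installed, layer_norm is not a top-level module and the import fails with ModuleNotFoundError. The sketch below is a minimal illustration, assuming the patched package and torch are installed; the module and class names are taken directly from the diff above, nothing else is implied about the library's API.

    # With the relative imports in place, the package can be imported from
    # anywhere, since each module resolves its siblings relative to the
    # package rather than the current working directory.
    from ane_transformers.reference.multihead_attention import MultiHeadAttention

    # Inside a package module, the patched form is:
    #   from .layer_norm import LayerNormANE                        # explicit sibling module
    #   from . import encoder, decoder, multihead_attention, ffn    # siblings via the package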