Source code for pipeline.Embedding.models.layerless_embedding
import typing
import torch
from utils.modelutils.mlp import make_mlp
from ..embedding_base import EmbeddingBase
class LayerlessEmbedding(EmbeddingBase):
    def __init__(self, hparams: typing.Dict[str, typing.Any]):
        """
        Initialise the Lightning Module that can scan over different embedding training regimes.
        """
        super().__init__(hparams)

        # Construct the MLP architecture
        self.network = make_mlp(
            self.get_n_features(),
            [self.n_hiddens] * self.n_layers + [hparams["emb_dim"]],
            hidden_activation=hparams["activation"],
            output_activation=None,
            layer_norm=hparams.get("layernorm", True),
        )
        self.save_hyperparameters()
    @property
    def n_hiddens(self) -> int:
        """Number of hidden units in each hidden layer.

        Corresponds to the hyperparameter ``n_hiddens``; falls back to
        ``emb_hidden`` if ``n_hiddens`` is not set.
        """
        n_hiddens = self.hparams.get("n_hiddens", self.hparams.get("emb_hidden"))
        if n_hiddens is None:
            raise ValueError(
                "The number of hidden units was not provided through "
                "the hyperparameter `n_hiddens` (or its fallback `emb_hidden`)"
            )
        return n_hiddens
    @property
    def n_layers(self) -> int:
        """Number of hidden layers in the MLP.

        Corresponds to the hyperparameter ``n_layers``; falls back to
        ``nb_layer`` if ``n_layers`` is not set.
        """
        n_layers = self.hparams.get("n_layers", self.hparams.get("nb_layer"))
        if n_layers is None:
            raise ValueError(
                "The number of layers was not provided through "
                "the hyperparameter `n_layers` (or its fallback `nb_layer`)"
            )
        return n_layers
    def forward(self, x):
        x_out = self.network(x)
        # Optionally project the embeddings onto the unit hypersphere (L2 normalisation)
        if self.hparams.get("normalize_output", False):
            return torch.nn.functional.normalize(x_out)
        return x_out
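
For reference, a minimal usage sketch (not part of the module source). The hyperparameter values below are illustrative assumptions chosen to match the keys this class reads; any additional keys that EmbeddingBase or get_n_features() may require are omitted.

# Illustrative only: values are assumptions, not the project's defaults.
hparams = {
    "n_hiddens": 512,          # hidden units per layer ("emb_hidden" also accepted)
    "n_layers": 4,             # number of hidden layers ("nb_layer" also accepted)
    "emb_dim": 8,              # output embedding dimension
    "activation": "ReLU",      # hidden activation name passed to make_mlp
    "layernorm": True,         # enable layer norm inside the MLP
    "normalize_output": True,  # L2-normalise the embeddings in forward()
}

model = LayerlessEmbedding(hparams)
x = torch.randn(100, model.get_n_features())  # dummy batch of input features
embeddings = model(x)                          # shape: (100, hparams["emb_dim"])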