Model

Embedding model for Highway2Vec.

This module contains the embedding model from the highway2vec paper [1].

References
  1. https://doi.org/10.1145/3557918.3565865

Highway2VecModel(n_features, n_hidden=64, n_embed=30, lr=0.001)

Bases: Model

Autoencoder-based embedding model for highway2vec.

PARAMETER DESCRIPTION
n_features: Number of features. TYPE: int
n_hidden: Number of hidden units. TYPE: int, DEFAULT: 64
n_embed: Embedding size. TYPE: int, DEFAULT: 30
lr: Learning rate. TYPE: float, DEFAULT: 0.001

Source code in srai/embedders/highway2vec/model.py
def __init__(self, n_features: int, n_hidden: int = 64, n_embed: int = 30, lr: float = 1e-3):
    """
    Init Highway2VecModel.

    Args:
        n_features (int): Number of features.
        n_hidden (int, optional): Number of hidden units. Defaults to 64.
        n_embed (int, optional): Embedding size. Defaults to 30.
        lr (float, optional): Learning rate. Defaults to 1e-3.
    """
    import_optional_dependencies(
        dependency_group="torch", modules=["torch", "pytorch_lightning"]
    )
    from torch import nn

    super().__init__()

    self.save_hyperparameters()

    # Encoder: compress the input features into the embedding space.
    self.encoder = nn.Sequential(
        nn.Linear(n_features, n_hidden),
        nn.ReLU(),
        nn.Linear(n_hidden, n_embed),
    )
    # Decoder: reconstruct the original features from the embedding.
    self.decoder = nn.Sequential(
        nn.Linear(n_embed, n_hidden),
        nn.ReLU(),
        nn.Linear(n_hidden, n_features),
    )
    self.n_features = n_features
    self.n_hidden = n_hidden
    self.n_embed = n_embed
    self.lr = lr
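
A minimal usage sketch (assuming the torch and pytorch_lightning optional dependencies are installed; the feature count of 20 is hypothetical):

from srai.embedders.highway2vec.model import Highway2VecModel

model = Highway2VecModel(n_features=20, n_hidden=64, n_embed=30, lr=1e-3)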

get_config()

Get model config.

Source code in srai/embedders/_base.py
def get_config(self) -> dict[str, Any]:
    """Get model config."""
    model_config = {
        k: v
        for k, v in vars(self).items()
        if k[0] != "_"
        and k
        not in (
            "training",
            "prepare_data_per_node",
            "allow_zero_length_dataloader_with_multiple_devices",
        )
    }

    return model_config
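
For illustration, a hedged sketch of what the returned dict likely contains for a model built with the defaults (exact keys may vary with the pytorch_lightning version, since the method filters vars(self)):

model = Highway2VecModel(n_features=20)
config = model.get_config()
# Likely contains the constructor hyperparameters, e.g.:
# {"n_features": 20, "n_hidden": 64, "n_embed": 30, "lr": 0.001}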

save(path)

Save the model to a file.

PARAMETER DESCRIPTION
path: Path to the file. TYPE: Union[Path, str]

Source code in srai/embedders/_base.py
def save(self, path: Union[Path, str]) -> None:
    """
    Save the model to a file.

    Args:
        path (Union[Path, str]): Path to the file.
    """
    import torch

    torch.save(self.state_dict(), path)
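
A short usage sketch (the file name is hypothetical):

model = Highway2VecModel(n_features=20)
model.save("highway2vec.pt")  # writes model.state_dict() via torch.save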

load(path, **kwargs)

classmethod

Load model from a file.

PARAMETER DESCRIPTION
path: Path to the file. TYPE: Union[Path, str]
**kwargs: Additional kwargs to pass to the model constructor. TYPE: dict, DEFAULT: {}

Source code in srai/embedders/_base.py
@classmethod
def load(cls, path: Union[Path, str], **kwargs: Any) -> "Model":
    """
    Load model from a file.

    Args:
        path (Union[Path, str]): Path to the file.
        **kwargs (dict): Additional kwargs to pass to the model constructor.
    """
    import torch

    if isinstance(path, str):
        path = Path(path)

    model = cls(**kwargs)
    model.load_state_dict(torch.load(path))
    return model
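
Since only the state dict is saved, the constructor arguments must be passed again when loading. A round-trip sketch with a hypothetical path:

model = Highway2VecModel.load("highway2vec.pt", n_features=20)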

forward(x)

Forward pass. Returns the embedding produced by the encoder.

PARAMETER DESCRIPTION
x: Input tensor. TYPE: Tensor

Source code in srai/embedders/highway2vec/model.py
def forward(self, x: "torch.Tensor") -> "torch.Tensor":
    """
    Forward pass.

    Args:
        x (torch.Tensor): Input tensor.
    """
    z: torch.Tensor = self.encoder(x)
    return z
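
A quick sketch of the expected shapes (assuming the defaults and a hypothetical n_features=20):

import torch

model = Highway2VecModel(n_features=20)
x = torch.rand(8, 20)      # batch of 8 feature vectors
z = model(x)               # encoder output only, not the reconstruction
print(z.shape)             # torch.Size([8, 30])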

training_step(batch, batch_idx)

Training step.

PARAMETER DESCRIPTION
batch: Batch of input features. TYPE: Tensor
batch_idx: Batch index. TYPE: int

Source code in srai/embedders/highway2vec/model.py
def training_step(self, batch: "torch.Tensor", batch_idx: int) -> "torch.Tensor":
    """
    Training step.

    Args:
        batch (torch.Tensor): Batch.
        batch_idx (int): Batch index.
    """
    return self._common_step(batch, batch_idx, "train")
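
The shared _common_step helper is not shown on this page. For an autoencoder like this one, a plausible (purely hypothetical) sketch is a reconstruction loss between the input and decoder(encoder(input)):

import torch.nn.functional as F

def _common_step(self, batch: "torch.Tensor", batch_idx: int, stage: str) -> "torch.Tensor":
    """Hypothetical sketch: reconstruct the input and penalize the error."""
    z = self.encoder(batch)
    x_hat = self.decoder(z)
    loss = F.mse_loss(x_hat, batch)
    self.log(f"{stage}_loss", loss)
    return loss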

configure_optimizers()

Configure the Adam optimizer.

Source code in srai/embedders/highway2vec/model.py
def configure_optimizers(self) -> "torch.optim.Optimizer":
    """Configure optimizer."""
    import torch

    optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
    return optimizer
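
Putting it together, a hypothetical end-to-end training sketch with a pytorch_lightning Trainer (the random feature matrix stands in for real road-network features):

import pytorch_lightning as pl
import torch
from torch.utils.data import DataLoader

features = torch.rand(100, 20)                 # hypothetical feature matrix
loader = DataLoader(features, batch_size=32)   # yields (32, 20) batches

model = Highway2VecModel(n_features=20)
trainer = pl.Trainer(max_epochs=10)
trainer.fit(model, loader)

embeddings = model(features)                   # (100, 30) embeddings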