
Model

Embedding model for gtfs2vec.

This module contains the embedding model from the gtfs2vec paper [1].

References
  1. https://doi.org/10.1145/3486640.3491392

GTFS2VecModel(n_features, n_hidden=48, n_embed=64)

Bases: Model

Autoencoder-based embedding model for gtfs2vec.

PARAMETERS

    n_features (int): Number of features.
    n_hidden (int): Number of hidden units. Defaults to 48.
    n_embed (int): Embedding size. Defaults to 64.

Source code in srai/embedders/gtfs2vec/model.py
def __init__(
    self,
    n_features: int,
    n_hidden: int = 48,
    n_embed: int = 64,
) -> None:
    """
    Init GTFS2VecModel.

    Args:
        n_features (int): Number of features.
        n_hidden (int, optional): Number of hidden units. Defaults to 48.
        n_embed (int, optional): Embedding size. Defaults to 64.
    """
    super().__init__()
    import_optional_dependencies(
        dependency_group="torch", modules=["torch", "pytorch_lightning"]
    )
    from torch import nn

    self.n_features = n_features
    self.n_hidden = n_hidden
    self.n_embed = n_embed
    self.encoder = nn.Sequential(
        nn.Linear(n_features, n_hidden), nn.ReLU(), nn.Linear(n_hidden, n_embed)
    )
    self.decoder = nn.Sequential(
        nn.Linear(n_embed, n_hidden), nn.ReLU(), nn.Linear(n_hidden, n_features)
    )
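
A minimal construction sketch (assuming torch and pytorch_lightning are installed; the feature count of 42 is illustrative, and the import path is inferred from the source location shown above):

from srai.embedders.gtfs2vec.model import GTFS2VecModel

# Hypothetical feature width; in practice it matches the GTFS feature
# vectors produced upstream in the gtfs2vec pipeline.
model = GTFS2VecModel(n_features=42)
print(model.n_embed)  # 64 by default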

get_config()

Get model config.

Source code in srai/embedders/_base.py
def get_config(self) -> dict[str, Any]:
    """Get model config."""
    model_config = {
        k: v
        for k, v in vars(self).items()
        if k[0] != "_"
        and k
        not in (
            "training",
            "prepare_data_per_node",
            "allow_zero_length_dataloader_with_multiple_devices",
        )
    }

    return model_config
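
For example, on a freshly constructed model the returned config contains the public attributes set in __init__ (a sketch; any additional public attributes contributed by the base class would also appear):

model = GTFS2VecModel(n_features=42)
model.get_config()
# e.g. {'n_features': 42, 'n_hidden': 48, 'n_embed': 64}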

save(path)

Save the model's state dict to a file.

PARAMETERS

    path (Union[Path, str]): Path to the file where the state dict is saved.

Source code in srai/embedders/_base.py
def save(self, path: Union[Path, str]) -> None:
    """
    Save the model's state dict to a file.

    Args:
        path (Union[Path, str]): Path to the output file.
    """
    import torch

    # Only the weights are persisted; constructor kwargs are needed again on load.
    torch.save(self.state_dict(), path)
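
A saving sketch (the file name is illustrative):

model = GTFS2VecModel(n_features=42)
model.save("gtfs2vec_model.pt")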

load(path, **kwargs)

classmethod

Load model from a file.

PARAMETERS

    path (Union[Path, str]): Path to the file.
    **kwargs (Any): Additional kwargs to pass to the model constructor.

Source code in srai/embedders/_base.py
@classmethod
def load(cls, path: Union[Path, str], **kwargs: Any) -> "Model":
    """
    Load model from a file.

    Args:
        path (Union[Path, str]): Path to the file.
        **kwargs (dict): Additional kwargs to pass to the model constructor.
    """
    import torch

    if isinstance(path, str):
        path = Path(path)

    model = cls(**kwargs)
    model.load_state_dict(torch.load(path))
    return model
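
Because save writes only the state dict, load must be given the same constructor kwargs to rebuild an identical architecture before the weights are restored (a sketch, reusing the illustrative file name from above):

restored = GTFS2VecModel.load("gtfs2vec_model.pt", n_features=42)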

forward(x)

Forward pass.

PARAMETERS

    x (torch.Tensor): Input tensor of shape (batch, n_features).

Source code in srai/embedders/gtfs2vec/model.py
def forward(self, x: "torch.Tensor") -> "torch.Tensor":
    """
    Forward pass.

    Args:
        x (torch.Tensor): Input tensor.
    """
    # The embedding is taken from the encoder only; the decoder is used in training.
    embedding: torch.Tensor = self.encoder(x)
    return embedding
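
Calling the model maps a batch of shape (N, n_features) to (N, n_embed). A sketch with random input (the batch size of 8 is arbitrary):

import torch

model = GTFS2VecModel(n_features=42)
x = torch.rand(8, 42)       # batch of 8 feature vectors
embedding = model(x)        # invokes forward via __call__
assert embedding.shape == (8, 64)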

configure_optimizers()

Configure optimizer.

Source code in srai/embedders/gtfs2vec/model.py
def configure_optimizers(self) -> "torch.optim.Optimizer":
    """Configure optimizer."""
    import torch

    optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
    return optimizer

training_step(batch, batch_idx)

Training step.

PARAMETERS

    batch (torch.Tensor): Batch of input feature vectors.
    batch_idx (Any): Batch index.

Source code in srai/embedders/gtfs2vec/model.py
def training_step(self, batch: "torch.Tensor", batch_idx: Any) -> "torch.Tensor":
    """
    Training step.

    Args:
        batch (torch.Tensor): Batch.
        batch_idx (Any): Batch index.
    """
    from torch.nn import functional as F

    x = batch
    z = self.encoder(x)
    x_hat = self.decoder(z)
    loss = F.mse_loss(x_hat, x)
    self.log("train_loss", loss)
    return loss
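
An end-to-end training sketch, assuming the Model base class is a pytorch_lightning.LightningModule (which the Lightning bookkeeping attributes filtered in get_config suggest); the random data, batch size, and epoch count are all illustrative:

import torch
from torch.utils.data import DataLoader
import pytorch_lightning as pl

model = GTFS2VecModel(n_features=42)
features = torch.rand(128, 42)                # stand-in for real GTFS feature vectors
loader = DataLoader(features, batch_size=32)  # yields (32, 42) batches
trainer = pl.Trainer(max_epochs=5)
trainer.fit(model, train_dataloaders=loader)  # uses training_step and the Adam optimizer above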