novae.module.GraphEncoder

Bases: LightningModule

Graph encoder of Novae. It uses a graph attention network (GATv2) to compute node representations, which are then pooled into one representation per graph by attention aggregation.

Source code in novae/module/encode.py
```python
class GraphEncoder(L.LightningModule):
    """Graph encoder of Novae. It uses a graph attention network."""

    @utils.format_docs
    def __init__(
        self,
        embedding_size: int,
        hidden_size: int,
        num_layers: int,
        output_size: int,
        heads: int,
    ) -> None:
        """
        Args:
            {embedding_size}
            hidden_size: The size of the hidden layers in the GAT.
            num_layers: The number of layers in the GAT.
            {output_size}
            heads: The number of attention heads in the GAT.
        """
        super().__init__()
        self.gnn = GAT(
            embedding_size,
            hidden_channels=hidden_size,
            num_layers=num_layers,
            out_channels=output_size,
            edge_dim=1,
            v2=True,
            heads=heads,
            act="ELU",
        )

        self.node_aggregation = AttentionAggregation(output_size)

    @utils.format_docs
    def forward(self, data: Data) -> Tensor:
        """Encode the input data.

        Args:
            {data}

        Returns:
            A tensor of shape `(B, O)` containing the encoded graphs.
        """
        out = self.gnn(x=data.x, edge_index=data.edge_index, edge_attr=data.edge_attr)
        return self.node_aggregation(out, index=data.batch)
```

__init__(embedding_size, hidden_size, num_layers, output_size, heads)

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `embedding_size` | `int` | Size of the embeddings of the genes (E in the article). | required |
| `hidden_size` | `int` | The size of the hidden layers in the GAT. | required |
| `num_layers` | `int` | The number of layers in the GAT. | required |
| `output_size` | `int` | Size of the representations, i.e. the encoder outputs (O in the article). | required |
| `heads` | `int` | The number of attention heads in the GAT. | required |
Source code in novae/module/encode.py
```python
@utils.format_docs
def __init__(
    self,
    embedding_size: int,
    hidden_size: int,
    num_layers: int,
    output_size: int,
    heads: int,
) -> None:
    """
    Args:
        {embedding_size}
        hidden_size: The size of the hidden layers in the GAT.
        num_layers: The number of layers in the GAT.
        {output_size}
        heads: The number of attention heads in the GAT.
    """
    super().__init__()
    self.gnn = GAT(
        embedding_size,
        hidden_channels=hidden_size,
        num_layers=num_layers,
        out_channels=output_size,
        edge_dim=1,
        v2=True,
        heads=heads,
        act="ELU",
    )

    self.node_aggregation = AttentionAggregation(output_size)
```
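For orientation, here is a minimal instantiation sketch. All sizes below are arbitrary illustrative choices, not library defaults:

```python
from novae.module import GraphEncoder

# Sizes are assumptions for illustration only, not library defaults.
encoder = GraphEncoder(
    embedding_size=100,  # E: size of the gene embeddings
    hidden_size=64,      # width of the hidden GAT layers
    num_layers=10,       # number of GAT layers
    output_size=64,      # O: size of each graph representation
    heads=4,             # attention heads per GAT layer
)
```

Note that `hidden_size` should be divisible by `heads`, since the GAT concatenates the per-head outputs in its hidden layers.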

forward(data)

Encode the input data.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `data` | `Data` | A PyTorch Geometric `Data` object representing a batch of B graphs. | required |

Returns:

| Type | Description |
|------|-------------|
| `Tensor` | A tensor of shape `(B, O)` containing the encoded graphs. |

Source code in novae/module/encode.py
```python
@utils.format_docs
def forward(self, data: Data) -> Tensor:
    """Encode the input data.

    Args:
        {data}

    Returns:
        A tensor of shape `(B, O)` containing the encoded graphs.
    """
    out = self.gnn(x=data.x, edge_index=data.edge_index, edge_attr=data.edge_attr)
    return self.node_aggregation(out, index=data.batch)
```
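As a usage sketch, the forward pass expects a PyTorch Geometric batch whose node features are E-dimensional and whose edge attributes are scalar (matching `edge_dim=1`). The `toy_graph` helper and all sizes below are hypothetical, chosen to match the encoder instantiated in the example above:

```python
import torch
from torch_geometric.data import Batch, Data

def toy_graph(num_nodes: int, num_edges: int, embedding_size: int = 100) -> Data:
    """Hypothetical helper: a random graph with E-dim node features and scalar edge attributes."""
    return Data(
        x=torch.randn(num_nodes, embedding_size),
        edge_index=torch.randint(0, num_nodes, (2, num_edges)),
        edge_attr=torch.rand(num_edges, 1),  # scalar edges, matching edge_dim=1
    )

# Batch B=2 graphs together; `data.batch` maps each node to its graph,
# which the attention aggregation uses to pool nodes per graph.
data = Batch.from_data_list([toy_graph(5, 8), toy_graph(7, 12)])
out = encoder(data)
print(out.shape)  # torch.Size([2, 64]) -> (B, O)
```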