Created
May 26, 2019 13:33
-
-
Save khuangaf/3a7a5703c296084d10239620fb833990 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch
from torch.nn import Parameter
from torch.nn import Sequential as Seq, Linear, ReLU

from torch_geometric.nn import MessagePassing
from torch_geometric.utils import add_self_loops, remove_self_loops
class SAGEConv(MessagePassing):
    """GraphSAGE-style convolution layer using element-wise max aggregation.

    Per node: each neighbor feature is transformed by a shared Linear + ReLU
    (``message``), the transformed neighbor features are max-aggregated, and
    the aggregate is concatenated with the node's own features and projected
    back to ``in_channels`` through ``update_weight`` + ReLU (``update``).

    Args:
        in_channels: Dimensionality of the input node features.
        out_channels: Dimensionality of the per-message (pre-update) features.
    """

    def __init__(self, in_channels, out_channels):
        super(SAGEConv, self).__init__(aggr='max')  # "Max" aggregation.
        self.lin = torch.nn.Linear(in_channels, out_channels)
        self.act = torch.nn.ReLU()
        # Projects the concatenated [aggr_out, x] (out_channels + in_channels
        # columns) back down to in_channels in update().
        self.update_weight = Parameter(torch.Tensor(in_channels + out_channels, in_channels))
        self.update_act = torch.nn.ReLU()
        # BUG FIX: torch.Tensor(...) allocates *uninitialized* memory and the
        # original never initialized it, so update() multiplied by garbage.
        torch.nn.init.xavier_uniform_(self.update_weight)

    def forward(self, x, edge_index):
        # x has shape [N, in_channels]
        # edge_index has shape [2, E]
        # Normalize the graph: strip any existing self-loops, then add exactly
        # one self-loop per node so each node sees its own features.
        edge_index, _ = remove_self_loops(edge_index)
        edge_index, _ = add_self_loops(edge_index, num_nodes=x.size(0))
        return self.propagate(edge_index, size=(x.size(0), x.size(0)), x=x)

    def message(self, x_j):
        # x_j has shape [E, in_channels]; returns [E, out_channels].
        x_j = self.lin(x_j)
        x_j = self.act(x_j)
        return x_j

    def update(self, aggr_out, x):
        # aggr_out has shape [N, out_channels]; x has shape [N, in_channels].
        new_embedding = torch.cat([aggr_out, x], dim=1)
        new_embedding = torch.matmul(new_embedding, self.update_weight)
        # BUG FIX: original called torch.update_act(...), which does not exist
        # (AttributeError at runtime); the ReLU lives on the module instance.
        new_embedding = self.update_act(new_embedding)
        return new_embedding
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment