How to propagate neural network gradients through a Networkx graph?

I'm trying to train a neural network so that every edge weight in the graph becomes 10. I start by generating random points (inp), and for every pair of consecutive points (tracked by idx) I add an edge with weight = 1 if none exists yet. If the two consecutive points already share an edge, the edge's current weight is fed to the NN, which outputs the additional weight to add to it.
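To make the pairing rule concrete, here is that step in isolation (a stripped-down sketch with a dummy +1 standing in for the network output; the full script follows below):

import networkx as nx
import torch

g = nx.DiGraph()
inp = torch.tensor([4, 7, 4, 7])  # example points: pairs are (4,7), (7,4), (4,7) - the last pair re-uses an existing edge
for a, b in zip(inp[:-1], inp[1:]):
    u, v = a.item(), b.item()
    if g.has_edge(u, v):
        g[u][v]["weight"] += 1  # dummy increment; the real code adds the NN output here
    else:
        g.add_edge(u, v, weight=1)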

import warnings
warnings.filterwarnings("ignore", category=UserWarning)

import torch
import torch.nn as nn
import networkx as nx
import matplotlib.pyplot as plt
import torch.optim as optim

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(1,3)
        self.fc2 = nn.Linear(3,1)

    def forward(self, x):
        x = self.fc1(x)
        x = self.fc2(x)
        return x

g = nx.DiGraph()
model = Net()
optimizer = optim.Adam(model.parameters(), lr=1e-3)

def training(n_iter):
    for epoch in range(n_iter):
        print(epoch)
        inp = torch.randint(0, 10, (20,))  # 20 random points in [0, 10)
        idx = 0
        while idx < len(inp) - 1:
            u, v = inp[idx].item(), inp[idx + 1].item()
            if g.has_edge(u, v):  # edge exists
                edge_weight = g[u][v]["weight"]
                edge_weight_tensor = torch.tensor([edge_weight]).float()  # to tensor

                added_edge_weight = model(edge_weight_tensor)  # value from network
                g[u][v]["weight"] += added_edge_weight  # the stored weight is now a tensor
            else:
                g.add_edge(u, v, weight=1)  # new edges start at weight 1
            idx += 1

        edges = g.edges()
        weights = [g[u][v]['weight'] for u, v in edges]
        optimizer.zero_grad()

        loss_list = [w for w in weights if not isinstance(w, int)]  # only take tensor weights
        try:
            loss_tensors = torch.stack(loss_list, dim=0) - 10  # target weight is 10
            loss_square = torch.square(loss_tensors)
            loss = torch.sum(loss_square)
            print(loss)
        except RuntimeError:  # stack fails when there are no tensors yet, so use a zero loss
            loss = torch.tensor(0.0, requires_grad=True)

        loss.backward(retain_graph=True)
        optimizer.step()
    return weights

weights = training(5)

# plot
plt.figure(figsize=(6, 6))
pos = nx.spring_layout(g, k=0.5)

nx.draw(g, with_labels=True, node_color='skyblue', font_weight='bold', width=weights, pos=pos)

My issue is that I'm not sure gradients can propagate through the graph this way, and it also seems that I can't add the NN's output to the edge's weight: I get the following error:

RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.FloatTensor [3, 1]], which is output 0 of TBackward, is at version 2; expected version 1 instead. Hint: enable anomaly detection to find the operation that failed to compute its gradient, with torch.autograd.set_detect_anomaly(True).
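Following the hint, I can turn on torch.autograd.set_detect_anomaly(True) to locate the failing op. My suspicion is that the tensor-valued edge weights stored in the graph keep their autograd history across epochs, so once optimizer.step() updates the parameters in place, the next backward pass runs through a stale graph. One workaround I've been considering (just a sketch under that assumption, not a confirmed fix) is to detach every stored weight after each optimizer step:

# Sketch (my own assumption, not a confirmed fix): after optimizer.step(),
# cut the autograd history of every tensor-valued edge weight so the next
# epoch builds a fresh graph instead of reusing one built before the
# parameters were updated in place.
for u, v in g.edges():
    w = g[u][v]["weight"]
    if torch.is_tensor(w):
        g[u][v]["weight"] = w.detach()  # same value, no grad history

If that diagnosis is right, retain_graph=True in loss.backward() should no longer be needed either, but I'd like to know whether this is the correct way to propagate gradients through a networkx graph at all.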

