From b4ee21cf966ee376846e1e5b771114e98d3c88d5 Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 14:39:02 +0800
Subject: [PATCH 1/6] Update match.yml

---
 match.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/match.yml b/match.yml
index c03eb28f..c1f030fd 100644
--- a/match.yml
+++ b/match.yml
@@ -1,5 +1,6 @@
 node_classification:
   - model:
+      - dropedge_gcn
      - gdc_gcn
      - gcn
      - gat
@@ -24,7 +25,6 @@ node_classification:
      - ppnp
      - sgcpn
      - sgc
-      - dropedge_gcn
      - unet
      - pprgo
    dataset:

From d1ebf51a41a99f95fee0ba33622ee7b244408845 Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 14:41:43 +0800
Subject: [PATCH 2/6] Create dropedge_gcn

---
 cogdl/models/nn/dropedge_gcn | 114 +++++++++++++++++++++++++++++++++++
 1 file changed, 114 insertions(+)
 create mode 100644 cogdl/models/nn/dropedge_gcn

diff --git a/cogdl/models/nn/dropedge_gcn b/cogdl/models/nn/dropedge_gcn
new file mode 100644
index 00000000..b5f9c6c8
--- /dev/null
+++ b/cogdl/models/nn/dropedge_gcn
@@ -0,0 +1,114 @@
+import numpy as np
+import torch
+import torch.nn.functional as F
+
+from .. import BaseModel, register_model
+from .gcn import GraphConvolution
+from cogdl.utils import add_remaining_self_loops, spmm, add_self_loops
+
+
+def drop_edge(adj, adj_values, rate):
+    # Randomly keep a (1 - rate) fraction of the edges.
+    num_edge = adj.shape[1]
+    index_edge = np.arange(num_edge)
+    np.random.shuffle(index_edge)
+    select_edge = np.sort(index_edge[: int((1 - rate) * num_edge)])
+    new_adj = adj[:, select_edge]
+    new_adj_values = adj_values[select_edge]
+    return new_adj, new_adj_values
+
+
+def bingge_norm_adj(adj, adj_values, num_nodes):
+    # D^-1/2 (A + I) D^-1/2 + I
+    adj, adj_values = add_self_loops(adj, adj_values, 1, num_nodes)
+    deg = spmm(adj, adj_values, torch.ones(num_nodes, 1).to(adj.device)).squeeze()
+    deg_sqrt = deg.pow(-1 / 2)
+    adj_values = deg_sqrt[adj[1]] * adj_values * deg_sqrt[adj[0]]
+    row, col = adj[0], adj[1]
+    adj_values[row == col] += 1  # add the identity back on the self-loop entries
+    return adj, adj_values
+
+
+def aug_norm_adj(adj, adj_values, num_nodes):
+    # D^-1/2 (A + I) D^-1/2
+    adj, adj_values = add_remaining_self_loops(adj, adj_values, 1, num_nodes)
+    deg = spmm(adj, adj_values, torch.ones(num_nodes, 1).to(adj.device)).squeeze()
+    deg_sqrt = deg.pow(-1 / 2)
+    adj_values = deg_sqrt[adj[1]] * adj_values * deg_sqrt[adj[0]]
+    return adj, adj_values
+
+
+def get_normalizer(normalization):
+    normalizer_dict = dict(AugNorm=aug_norm_adj, BinggeNorm=bingge_norm_adj)
+    if normalization not in normalizer_dict:
+        raise NotImplementedError
+    return normalizer_dict[normalization]
+
+
+@register_model("dropedge_gcn")
+class dropedge_gcn(BaseModel):
+    r"""The DropEdge GCN model from the `"DROPEDGE: TOWARDS DEEP GRAPH CONVOLUTIONAL NETWORKS ON NODE CLASSIFICATION" `_ paper
+
+    Args:
+        num_features (int) : Number of input features.
+        num_classes (int) : Number of classes.
+        hidden_size (int) : The dimension of node representation.
+        dropout (float) : Dropout rate for model training.
+        dropedge (float) : Rate of edges dropped at each training forward pass.
+        normalization (str) : Adjacency normalization, "AugNorm" or "BinggeNorm".
+    """
+
+    @staticmethod
+    def add_args(parser):
+        """Add model-specific arguments to the parser."""
+        # fmt: off
+        parser.add_argument("--num-features", type=int)
+        parser.add_argument("--num-classes", type=int)
+        parser.add_argument("--hidden-size", type=int, default=64)
+        parser.add_argument("--dropout", type=float, default=0.5)
+        # DropEdge
+        parser.add_argument("--dropedge", type=float, default=0.0)
+        parser.add_argument("--normalization", type=str, default="AugNorm")
+        # fmt: on
+
+    @classmethod
+    def build_model_from_args(cls, args):
+        return cls(args.num_features, args.hidden_size, args.num_classes, args.dropout,
+                   args.dropedge, args.normalization)
+
+    def __init__(self, nfeat, nhid, nclass, dropout, dropedge, normalization):
+        super(dropedge_gcn, self).__init__()
+        self.gc1 = GraphConvolution(nfeat, nhid)
+        self.gc2 = GraphConvolution(nhid, nclass)
+        self.dropout = dropout
+        self.dropedge = dropedge
+        self.normalization = normalization
+
+    def forward(self, x, adj):
+        device = x.device
+        adj_values = torch.ones(adj.shape[1]).to(device)
+        # Drop edges, then renormalize what is left; the chosen normalizer already
+        # adds self-loops and applies the symmetric normalization.
+        adj, adj_values = drop_edge(adj, adj_values, self.dropedge)
+        adj, adj_values = get_normalizer(self.normalization)(adj, adj_values, x.shape[0])
+
+        x = F.dropout(x, self.dropout, training=self.training)
+        x = F.relu(self.gc1(x, adj, adj_values))
+        x = F.dropout(x, self.dropout, training=self.training)
+        x = self.gc2(x, adj, adj_values)
+        return F.log_softmax(x, dim=-1)
+
+    def loss(self, data):
+        return F.nll_loss(
+            self.forward(data.x, data.edge_index)[data.train_mask],
+            data.y[data.train_mask],
+        )
+
+    def predict(self, data):
+        return self.forward(data.x, data.edge_index)

From f32cf5c4488ddf7d9bea7ea6b393f8aa4db5bb66 Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 14:42:35 +0800
Subject: [PATCH 3/6] Rename dropedge_gcn to dropedge_gcnv2.py

---
 cogdl/models/nn/{dropedge_gcn => dropedge_gcnv2.py} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename cogdl/models/nn/{dropedge_gcn => dropedge_gcnv2.py} (100%)

diff --git a/cogdl/models/nn/dropedge_gcn b/cogdl/models/nn/dropedge_gcnv2.py
similarity index 100%
rename from cogdl/models/nn/dropedge_gcn
rename to cogdl/models/nn/dropedge_gcnv2.py

From 4fff541dac77a53752a551be9b790b404cc9055d Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 14:47:36 +0800
Subject: [PATCH 4/6] Update test_node_classification.py

---
 tests/tasks/test_node_classification.py | 35 +++++++++++++++++++++++++
 1 file changed, 35 insertions(+)

diff --git a/tests/tasks/test_node_classification.py b/tests/tasks/test_node_classification.py
index 5adb8027..3283b109 100644
--- a/tests/tasks/test_node_classification.py
+++ b/tests/tasks/test_node_classification.py
@@ -667,7 +667,39 @@ def test_pprgo_cora():
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
+
+ def test_dropedge_gcn_cora():
+    args = get_default_args()
+    args.task = "node_classification"
+    args.dataset = "cora"
+    args.model = "dropedge_gcn"
+    args.dropedge = 0.5
+    args.normalization = "AugNorm"
+    task = build_task(args)
+    ret = task.train()
+    assert 0 <= ret["Acc"] <= 1
+
+ def test_dropedge_gcn_citeseer():
+    args = get_default_args()
+    args.task = "node_classification"
+    args.dataset = "citeseer"
+    args.model = "dropedge_gcn"
+    args.dropedge = 0.5
+    args.normalization = "AugNorm"
+    task = build_task(args)
+    ret = task.train()
+    assert 0 <= ret["Acc"] <= 1
+ def test_dropedge_gcn_pubmed():
+    args = get_default_args()
+    args.task = "node_classification"
+    args.dataset = "pubmed"
+    args.model = "dropedge_gcn"
+    args.dropedge = 0.5
+    args.normalization = "AugNorm"
+    task = build_task(args)
+    ret = task.train()
+    assert 0 <= ret["Acc"] <= 1

if __name__ == "__main__":
    test_gdc_gcn_cora()
@@ -708,3 +740,6 @@ def test_pprgo_cora():
    test_dropedge_densegcn_cora()
    test_unet_cora()
    test_pprgo_cora()
+    test_dropedge_gcn_cora()
+    test_dropedge_gcn_citeseer()
+    test_dropedge_gcn_pubmed()

From 2defb835cacfc022748723ce368e8fc774d82e43 Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 18:22:51 +0800
Subject: [PATCH 5/6] Update test_node_classification.py

---
 tests/tasks/test_node_classification.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/tests/tasks/test_node_classification.py b/tests/tasks/test_node_classification.py
index 3283b109..b13647b0 100644
--- a/tests/tasks/test_node_classification.py
+++ b/tests/tasks/test_node_classification.py
@@ -646,7 +646,6 @@ def test_dropedge_inceptiongcn_cora():
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
-
def test_pprgo_cora():
    args = get_default_args()
    args.cpu = True
@@ -668,7 +667,7 @@ def test_pprgo_cora():
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
- def test_dropedge_gcn_cora():
+def test_dropedge_gcn_cora():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
@@ -679,7 +678,7 @@ def test_dropedge_gcn_cora():
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
- def test_dropedge_gcn_citeseer():
+def test_dropedge_gcn_citeseer():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "citeseer"
@@ -690,7 +689,7 @@ def test_dropedge_gcn_citeseer():
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
- def test_dropedge_gcn_pubmed():
+def test_dropedge_gcn_pubmed():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "pubmed"
@@ -700,7 +699,7 @@ def test_dropedge_gcn_pubmed():
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
-
+
if __name__ == "__main__":
    test_gdc_gcn_cora()
    test_gcn_cora()

From afeca298cd2b6a2659e8495f0f93d0723ab19768 Mon Sep 17 00:00:00 2001
From: AvocadoTan <58587621+AvocadoTan@users.noreply.github.com>
Date: Sun, 27 Dec 2020 18:49:10 +0800
Subject: [PATCH 6/6] Update test_node_classification.py

---
 tests/tasks/test_node_classification.py | 307 ++++++------------------
 1 file changed, 68 insertions(+), 239 deletions(-)

diff --git a/tests/tasks/test_node_classification.py b/tests/tasks/test_node_classification.py
index b13647b0..e76ede9d 100644
--- a/tests/tasks/test_node_classification.py
+++ b/tests/tasks/test_node_classification.py
@@ -1,5 +1,4 @@
import torch
-import torch.nn.functional as F
from cogdl import options
from cogdl.tasks import build_task
from cogdl.datasets import build_dataset
@@ -20,34 +19,31 @@ def get_default_args():
        "lr": 0.01,
        "weight_decay": 5e-4,
        "missing_rate": -1,
-        "task": "node_classification",
-        "dataset": "cora",
    }
    return build_args_from_dict(default_dict)

def test_gdc_gcn_cora():
    args = get_default_args()
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "gdc_gcn"
+    args.task = 'node_classification'
+    args.dataset = 'cora'
+    args.model = 'gdc_gcn'
    dataset = build_dataset(args)
    args.num_features = dataset.num_features
    args.num_classes = dataset.num_classes
    args.num_layers = 1
-    args.alpha = 0.05  # ppr filter param
-    args.t = 5.0  # heat filter param
-    args.k = 128  # top k entries to be retained
-    args.eps = 0.01  # change depending on gdc_type
+    args.alpha = 0.05 # ppr filter param
+    args.t = 5.0 # heat filter param
+    args.k = 128 # top k entries to be retained
+    args.eps = 0.01 # change depending on gdc_type
    args.dataset = dataset
-    args.gdc_type = "ppr"  # ppr, heat, none
+    args.gdc_type = 'ppr' # ppr, heat, none
    model = build_model(args)
    task = build_task(args, dataset=dataset, model=model)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
-
def test_gcn_cora():
    args = get_default_args()
    args.task = "node_classification"
@@ -80,6 +76,29 @@ def test_mlp_pubmed():
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

+def test_dropedge_gcn_pubmed():
+    args = get_default_args()
+    args.task = "node_classification"
+    args.dataset = "pubmed"
+    args.model = "dropedge_gcn"
+    args.dropedge = 0.5
+    args.normalization = "AugNorm"
+    task = build_task(args)
+    ret = task.train()
+    assert 0 <= ret["Acc"] <= 1
+
+def test_dropedge_gcn_citeseer():
+    args = get_default_args()
+    args.task = "node_classification"
+    args.dataset = "citeseer"
+    args.model = "dropedge_gcn"
+    args.dropedge = 0.5
+    args.normalization = "AugNorm"
+    task = build_task(args)
+    ret = task.train()
+    assert 0 <= ret["Acc"] <= 1
+
+
def test_mixhop_citeseer():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "citeseer"
    args.model = "mixhop"
    args.layer1_pool = "full"
    args.layer2_pool = "full"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

def test_pairnorm_cora_deepgcn():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "pairnorm"
    args.pn_model = "DeepGCN"
    args.nlayer = 10
    args.missing_rate = 100
    args.norm_mode = 'PN-SI'
    args.residual = 0
    args.hidden_layers = 64
    args.nhead = 1
    args.dropout = 0.6
    args.norm_scale = 1.0
    args.no_fea_norm = "store_false"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <=1

def test_pairnorm_cora_gcn():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "pairnorm"
    args.pn_model = "GCN"
    args.nlayer = 10
    args.missing_rate = 100
    args.norm_mode = 'PN-SI'
    args.residual = 0
    args.hidden_layers = 64
    args.nhead = 1
    args.dropout = 0.6
    args.norm_scale = 1.0
    args.no_fea_norm = "store_false"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <=1

def test_pairnorm_cora_sgc():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "pairnorm"
    args.pn_model = "SGC"
    args.nlayer = 10
    args.missing_rate = 100
    args.norm_mode = 'PN-SI'
    args.residual = 0
    args.hidden_layers = 64
    args.nhead = 1
    args.dropout = 0.6
    args.norm_scale = 1.0
    args.no_fea_norm = "store_false"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <=1

def test_pairnorm_cora_deepgat():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "pairnorm"
    args.pn_model = "DeepGAT"
    args.nlayer = 10
    args.missing_rate = 100
    args.norm_mode = 'PN-SI'
    args.residual = 0
    args.hidden_layers = 64
    args.nhead = 1
    args.dropout = 0.6
    args.norm_scale = 1.0
    args.no_fea_norm = "store_false"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <=1

def test_graphsage_cora():
    args = get_default_args()
    args.task = "node_classification"
+    args.dataset = "cora"
    args.model = "graphsage"
-    args.batch_size = 256
    args.num_layers = 2
-    args.patience = 1
-    args.max_epoch = 5
-    args.hidden_size = [32, 32]
-    args.sample_size = [3, 5]
-    args.num_workers = 1
-    for dataset in ["cora", "pubmed"]:
-        args.dataset = dataset
-        task = build_task(args)
-        ret = task.train()
+    args.hidden_size = [128]
+    args.sample_size = [10, 10]
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

def test_pyg_unet_cora():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
-    args.model = "pyg_unet"
-    args.num_layers = 2
-    task = build_task(args)
-    ret = task.train()
-    assert 0 <= ret["Acc"] <= 1
-
-
-def test_unet_cora():
-    args = get_default_args()
-    args.cpu = True
+    args.model = "unet"
-    args.pool_rate = [0.5, 0.5]
-    args.n_pool = 2
-    args.adj_dropout = 0.3
-    args.n_dropout = 0.8
-    args.hidden_size = 16
-    args.improved = True
-    args.aug_adj = True
-    args.activation = "elu"
+    args.num_layers = 2
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
-
-def test_sign_cora():
-    args = get_default_args()
-    args.task = "node_classification"
-    args.model = "sign"
-    args.dataset = "cora"
-    args.lr = 0.00005
-    args.hidden_size = 2048
-    args.num_layers = 3
-    args.num_propagations = 3
-    args.dropout = 0.3
-    args.directed = False
-    args.dropedge_rate = 0.2
-    args.asymm_norm = False
-    args.set_diag = False
-    args.remove_diag = False
-    task = build_task(args)
-    ret = task.train()
-    assert 0 < ret["Acc"] < 1
-
-
def test_jknet_jknet_cora():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "jknet_cora"
    args.model = "jknet"
    args.lr = 0.005
    args.layer_aggregation = 'maxpool'
    args.node_aggregation = 'sum'
    args.n_layers = 6
    args.n_units = 16
    args.in_features = 1433
    args.out_features = 7
    args.max_epoch = 100
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

def test_ppnp_citeseer():
    args = get_default_args()
    args.task = 'node_classification'
    args.model = 'ppnp'
    args.dataset = 'citeseer'
    args.propagation_type = 'ppnp'
    args.alpha = 0.1
    args.num_iterations = 10
    task = build_task(args)
    ret = task.train()
    assert 0 < ret['Acc'] < 1

def test_appnp_citeseer():
    args = get_default_args()
    args.task = 'node_classification'
    args.model = 'ppnp'
    args.dataset = 'citeseer'
    args.propagation_type = 'appnp'
    args.alpha = 0.1
    args.num_iterations = 10
    task = build_task(args)
    ret = task.train()
    assert 0 < ret['Acc'] < 1

def test_sgcpn_cora():
    args = get_default_args()
    args.dataset = "cora"
    args.model = "sgcpn"
    args.task = "node_classification"
    args.lr = 0.005
    args.num_layers = 40
    args.norm_mode = "PN"
    args.norm_scale = 10
    args.dropout = 0.6
    task = build_task(args)
    ret = task.train()
    assert 0 < ret["Acc"] < 1

def test_sgc_cora():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "sgc"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1
-
-
-def test_dropedge_gcn_cora():
-    args = get_default_args()
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "dropedge_gcn"
-    args.baseblock = "mutigcn"
-    args.inputlayer = "gcn"
-    args.outputlayer = "gcn"
-    args.hidden_size = 64
-    args.dropout = 0.5
-    args.withbn = False
-    args.withloop = False
-    args.nhiddenlayer = 1
-    args.nbaseblocklayer = 1
-    args.aggrmethod = "default"
-    args.activation = F.relu
-    args.task_type = "full"
-
-    task = build_task(args)
-    ret = task.train()
-    assert 0 <= ret["Acc"] <= 1
-
-
-def test_dropedge_resgcn_cora():
-    args = get_default_args()
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "dropedge_gcn"
-    args.baseblock = "resgcn"
-    args.inputlayer = "gcn"
-    args.outputlayer = "gcn"
-    args.hidden_size = 64
-    args.dropout = 0.5
-    args.withbn = False
-    args.withloop = False
-    args.nhiddenlayer = 1
-    args.nbaseblocklayer = 1
-    args.aggrmethod = "concat"
-    args.activation = F.relu
-    args.task_type = "full"
-
-    task = build_task(args)
-    ret = task.train()
-    assert 0 <= ret["Acc"] <= 1
-
-
-def test_dropedge_densegcn_cora():
-    args = get_default_args()
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "dropedge_gcn"
-    args.baseblock = "densegcn"
-    args.inputlayer = ""
-    args.outputlayer = "none"
-    args.hidden_size = 64
-    args.dropout = 0.5
-    args.withbn = False
-    args.withloop = False
-    args.nhiddenlayer = 1
-    args.nbaseblocklayer = 1
-    args.aggrmethod = "add"
-    args.activation = F.relu
-    args.task_type = "full"
-
-    task = build_task(args)
-    ret = task.train()
-    assert 0 <= ret["Acc"] <= 1
-
-
-def test_dropedge_inceptiongcn_cora():
-    args = get_default_args()
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "dropedge_gcn"
-    args.baseblock = "inceptiongcn"
-    args.inputlayer = "gcn"
-    args.outputlayer = "gcn"
-    args.hidden_size = 64
-    args.dropout = 0.5
-    args.withbn = False
-    args.withloop = False
-    args.nhiddenlayer = 1
-    args.nbaseblocklayer = 1
-    args.aggrmethod = "add"
-    args.activation = F.relu
-    args.task_type = "full"
-
-    task = build_task(args)
-    ret = task.train()
-    assert 0 <= ret["Acc"] <= 1
-
-def test_pprgo_cora():
-    args = get_default_args()
-    args.cpu = True
-    args.task = "node_classification"
-    args.dataset = "cora"
-    args.model = "pprgo"
-    args.k = 32
-    args.alpha = 0.5
-    args.eval_step = 1
-    args.batch_size = 32
-    args.test_batch_size = 128
-    args.activation = "relu"
-    args.num_layers = 2
-    args.nprop_inference = 2
-    args.eps = 0.001
-    for norm in ["sym", "row"]:
-        args.norm = norm
-        task = build_task(args)
-        ret = task.train()
-        assert 0 <= ret["Acc"] <= 1
-
def test_dropedge_gcn_cora():
    args = get_default_args()
    args.task = "node_classification"
    args.dataset = "cora"
    args.model = "dropedge_gcn"
    args.dropedge = 0.5
    args.normalization = "AugNorm"
    task = build_task(args)
    ret = task.train()
    assert 0 <= ret["Acc"] <= 1

    test_grand_cora()
    test_pyg_gcn_cora_sampler()
    test_gpt_gnn_cora()
-    test_sign_cora()
    test_jknet_jknet_cora()
-    test_sgcpn_cora()
-    test_ppnp_cora()
-    test_appnp_cora()
-    test_dropedge_gcn_cora()
-    test_dropedge_resgcn_cora()
-    test_dropedge_inceptiongcn_cora()
-    test_dropedge_densegcn_cora()
-    test_unet_cora()
-    test_pprgo_cora()
+    test_ppnp_citeseer()
+    test_appnp_citeseer()
    test_dropedge_gcn_cora()
    test_dropedge_gcn_citeseer()
    test_dropedge_gcn_pubmed()
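
Note (not part of the patches above): a minimal usage sketch of the new `dropedge_gcn` model, mirroring the `build_task` calls in `tests/tasks/test_node_classification.py`. The assumption that `build_args_from_dict` is importable from `cogdl.utils`, and the exact set of default arguments, are taken from that test file rather than from any documented entry point, so treat this as a sketch under those assumptions.

```python
# Sketch only: mirrors how the tests in this patch series drive the model.
from cogdl.tasks import build_task
from cogdl.utils import build_args_from_dict  # assumed location, as in the test file


def run_dropedge_gcn_on_cora():
    # Defaults copied from get_default_args() in the test file, plus the two
    # DropEdge-specific flags introduced by this patch series.
    args = build_args_from_dict(
        {
            "task": "node_classification",
            "dataset": "cora",
            "model": "dropedge_gcn",
            "hidden_size": 64,
            "dropout": 0.5,
            "dropedge": 0.5,  # fraction of edges dropped at each forward pass
            "normalization": "AugNorm",  # or "BinggeNorm"
            "patience": 2,
            "max_epoch": 3,
            "cpu": True,
            "device_id": [0],
            "lr": 0.01,
            "weight_decay": 5e-4,
            "missing_rate": -1,
        }
    )
    result = build_task(args).train()
    print(result["Acc"])


if __name__ == "__main__":
    run_dropedge_gcn_on_cora()
```

Since `--dropedge` defaults to 0.0 in `add_args`, the flag has to be set explicitly (as the tests do with 0.5) for any edges to actually be dropped during training.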