Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Misc] Modify cugraph tests to avoid import dgl #7075

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions tests/cugraph/cugraph-ops/test_cugraph_gatconv.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
from collections import OrderedDict
from itertools import product

import dgl
import pytest
import torch

from dgl import graph, to_block as dgl_to_block
from dgl.nn import CuGraphGATConv, GATConv

options = OrderedDict(
Expand All @@ -20,7 +21,7 @@
def generate_graph():
u = torch.tensor([0, 1, 0, 2, 3, 0, 4, 0, 5, 0, 6, 7, 0, 8, 9])
v = torch.tensor([1, 9, 2, 9, 9, 4, 9, 5, 9, 6, 9, 9, 8, 9, 0])
g = dgl.graph((u, v))
g = graph((u, v))
return g


Expand All @@ -34,7 +35,7 @@ def test_gatconv_equality(idtype_int, max_in_degree, num_heads, to_block):
if idtype_int:
g = g.int()
if to_block:
g = dgl.to_block(g)
g = dgl_to_block(g)
feat = torch.rand(g.num_src_nodes(), in_feat).to(device)

torch.manual_seed(0)
Expand Down
13 changes: 7 additions & 6 deletions tests/cugraph/cugraph-ops/test_cugraph_relgraphconv.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
from collections import OrderedDict
from itertools import product

import dgl
import pytest
import torch

from dgl import ETYPE, graph, to_block as dgl_to_block
from dgl.nn import CuGraphRelGraphConv, RelGraphConv

# TODO(tingyu66): Re-enable the following tests after updating cuGraph CI image.
Expand All @@ -23,7 +24,7 @@
def generate_graph():
u = torch.tensor([0, 1, 0, 2, 3, 0, 4, 0, 5, 0, 6, 7, 0, 8, 9])
v = torch.tensor([1, 9, 2, 9, 9, 4, 9, 5, 9, 6, 9, 9, 8, 9, 0])
g = dgl.graph((u, v))
g = graph((u, v))
return g


Expand All @@ -41,11 +42,11 @@ def test_relgraphconv_equality(
"self_loop": self_loop,
}
g = generate_graph().to(device)
g.edata[dgl.ETYPE] = torch.randint(num_rels, (g.num_edges(),)).to(device)
g.edata[ETYPE] = torch.randint(num_rels, (g.num_edges(),)).to(device)
if idtype_int:
g = g.int()
if to_block:
g = dgl.to_block(g)
g = dgl_to_block(g)
feat = torch.rand(g.num_src_nodes(), in_feat).to(device)

torch.manual_seed(0)
Expand All @@ -55,8 +56,8 @@ def test_relgraphconv_equality(
kwargs["apply_norm"] = False
conv2 = CuGraphRelGraphConv(*args, **kwargs).to(device)

out1 = conv1(g, feat, g.edata[dgl.ETYPE])
out2 = conv2(g, feat, g.edata[dgl.ETYPE], max_in_degree=max_in_degree)
out1 = conv1(g, feat, g.edata[ETYPE])
out2 = conv2(g, feat, g.edata[ETYPE], max_in_degree=max_in_degree)
assert torch.allclose(out1, out2, atol=1e-06)

grad_out = torch.rand_like(out1)
Expand Down
7 changes: 4 additions & 3 deletions tests/cugraph/cugraph-ops/test_cugraph_sageconv.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
from collections import OrderedDict
from itertools import product

import dgl
import pytest
import torch

from dgl import graph, to_block as dgl_to_block
from dgl.nn import CuGraphSAGEConv, SAGEConv

options = OrderedDict(
Expand All @@ -19,7 +20,7 @@
def generate_graph():
u = torch.tensor([0, 1, 0, 2, 3, 0, 4, 0, 5, 0, 6, 7, 0, 8, 9])
v = torch.tensor([1, 9, 2, 9, 9, 4, 9, 5, 9, 6, 9, 9, 8, 9, 0])
g = dgl.graph((u, v))
g = graph((u, v))
return g


Expand All @@ -32,7 +33,7 @@ def test_SAGEConv_equality(idtype_int, max_in_degree, to_block):
if idtype_int:
g = g.int()
if to_block:
g = dgl.to_block(g)
g = dgl_to_block(g)
feat = torch.rand(g.num_src_nodes(), in_feat).to(device)

torch.manual_seed(0)
Expand Down
8 changes: 4 additions & 4 deletions tests/cugraph/test_basics.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import cugraph # usort: skip
import backend as F

import dgl
from dgl import from_cugraph, graph


def test_dummy():
Expand All @@ -12,7 +12,7 @@ def test_dummy():


def test_to_cugraph_conversion():
g = dgl.graph((F.tensor([0, 1, 2, 3]), F.tensor([1, 0, 3, 2]))).to("cuda")
g = graph((F.tensor([0, 1, 2, 3]), F.tensor([1, 0, 3, 2]))).to("cuda")
cugraph_g = g.to_cugraph()

assert cugraph_g.number_of_nodes() == g.num_nodes()
Expand All @@ -33,7 +33,7 @@ def test_from_cugraph_conversion():

cugraph_g.from_cudf_edgelist(df)

g = dgl.from_cugraph(cugraph_g)
g = from_cugraph(cugraph_g)

assert g.device.type == "cuda"
assert g.num_nodes() == cugraph_g.number_of_nodes()
Expand All @@ -52,7 +52,7 @@ def test_from_cugraph_conversion():

cugraph_g.from_cudf_edgelist(df)

g = dgl.from_cugraph(cugraph_g)
g = from_cugraph(cugraph_g)

assert g.device.type == "cuda"
assert g.num_nodes() == cugraph_g.number_of_nodes()
Expand Down
Loading