
Commit 5c3584a
lint
Committed Mar 14, 2023
1 parent: f8243b0

2 files changed, +13 −18 lines changed


python/dgl/nn/pytorch/__init__.py (+1, −6)
@@ -8,11 +8,6 @@
 from .factory import *
 from .hetero import *
 from .sparse_emb import NodeEmbedding
-from .utils import (
-    JumpingKnowledge,
-    LabelPropagation,
-    Sequential,
-    WeightBasis,
-)
+from .utils import JumpingKnowledge, LabelPropagation, Sequential, WeightBasis
 from .network_emb import *
 from .gt import *
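
The only change in this file is presentational: the parenthesized, multi-line import of JumpingKnowledge, LabelPropagation, Sequential, and WeightBasis is collapsed onto one line. As a quick sanity check (a minimal sketch, not part of the commit), the two forms parse to the same AST, so the rewrite cannot change behavior:

import ast

multi_line = """from .utils import (
    JumpingKnowledge,
    LabelPropagation,
    Sequential,
    WeightBasis,
)
"""
single_line = (
    "from .utils import JumpingKnowledge, LabelPropagation, Sequential, WeightBasis\n"
)

# ast.dump() omits line/column attributes by default, so the comparison looks only
# at the import structure; the trailing comma does not appear in the AST at all.
assert ast.dump(ast.parse(multi_line)) == ast.dump(ast.parse(single_line))
print("Both import forms are equivalent.")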

tests/python/pytorch/nn/test_nn.py (+12, −12)
@@ -2173,12 +2173,12 @@ def test_MetaPath2Vec(idtype):
     assert embeds.shape[0] == g.num_nodes()
 
 
-@pytest.mark.parametrize('num_layer', [1, 4])
-@pytest.mark.parametrize('k', [3, 5])
-@pytest.mark.parametrize('lpe_dim', [4, 16])
-@pytest.mark.parametrize('n_head', [1, 4])
-@pytest.mark.parametrize('batch_norm', [True, False])
-@pytest.mark.parametrize('num_post_layer', [0, 1, 2])
+@pytest.mark.parametrize("num_layer", [1, 4])
+@pytest.mark.parametrize("k", [3, 5])
+@pytest.mark.parametrize("lpe_dim", [4, 16])
+@pytest.mark.parametrize("n_head", [1, 4])
+@pytest.mark.parametrize("batch_norm", [True, False])
+@pytest.mark.parametrize("num_post_layer", [0, 1, 2])
 def test_LapPosEncoder(
     num_layer, k, lpe_dim, n_head, batch_norm, num_post_layer
 ):

@@ -2199,16 +2199,16 @@ def test_LapPosEncoder(
         k,
         lpe_dim,
         batch_norm=batch_norm,
-        num_post_layer=num_post_layer
+        num_post_layer=num_post_layer,
     ).to(ctx)
     assert model(EigVals, EigVecs).shape == (num_nodes, lpe_dim)
 
 
-@pytest.mark.parametrize('feat_size', [128, 512])
-@pytest.mark.parametrize('num_heads', [8, 16])
-@pytest.mark.parametrize('bias', [True, False])
-@pytest.mark.parametrize('attn_bias_type', ['add', 'mul'])
-@pytest.mark.parametrize('attn_drop', [0.1, 0.5])
+@pytest.mark.parametrize("feat_size", [128, 512])
+@pytest.mark.parametrize("num_heads", [8, 16])
+@pytest.mark.parametrize("bias", [True, False])
+@pytest.mark.parametrize("attn_bias_type", ["add", "mul"])
+@pytest.mark.parametrize("attn_drop", [0.1, 0.5])
 def test_BiasedMHA(feat_size, num_heads, bias, attn_bias_type, attn_drop):
     ndata = th.rand(16, 100, feat_size)
     attn_bias = th.rand(16, 100, 100, num_heads)
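
In the test file the lint pass only normalizes quote style (single to double quotes, consistent with black-style string normalization, though the commit message says only "lint") and adds a trailing comma to the last keyword argument; the parametrization itself is untouched. As a reminder of what the stacked markers do (a minimal, hypothetical sketch, not taken from this diff), each pytest.mark.parametrize decorator multiplies the generated cases, so the five markers on test_BiasedMHA expand to 2 × 2 × 2 × 2 × 2 = 32 invocations:

import pytest


@pytest.mark.parametrize("feat_size", [128, 512])
@pytest.mark.parametrize("num_heads", [8, 16])
def test_feat_size_divisible_by_heads(feat_size, num_heads):
    # Stacked markers take the Cartesian product: 2 x 2 = 4 test cases here.
    assert feat_size % num_heads == 0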
