Commit 39100bd

Merge pull request #55 from InfiniTensor/expose-kernels-module
Expose `kernels` module
2 parents 1d634ad + d8b6333

3 files changed: +82 -39 lines

src/ntops/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -1,3 +1,3 @@
-from ntops import torch
+from ntops import kernels, torch
 
-__all__ = ["torch"]
+__all__ = ["kernels", "torch"]

src/ntops/kernels/__init__.py

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
+from ntops.kernels import (
+    abs,
+    add,
+    addmm,
+    bitwise_and,
+    bitwise_not,
+    bitwise_or,
+    bmm,
+    clamp,
+    cos,
+    div,
+    dropout,
+    eq,
+    exp,
+    ge,
+    gelu,
+    gt,
+    isinf,
+    isnan,
+    layer_norm,
+    le,
+    lt,
+    mm,
+    mul,
+    ne,
+    neg,
+    pow,
+    relu,
+    rms_norm,
+    rotary_position_embedding,
+    rsqrt,
+    scaled_dot_product_attention,
+    sigmoid,
+    silu,
+    sin,
+    softmax,
+    sub,
+    tanh,
+)
+
+__all__ = [
+    "abs",
+    "add",
+    "addmm",
+    "bitwise_and",
+    "bitwise_not",
+    "bitwise_or",
+    "bmm",
+    "clamp",
+    "cos",
+    "div",
+    "dropout",
+    "eq",
+    "exp",
+    "ge",
+    "gelu",
+    "gt",
+    "isinf",
+    "isnan",
+    "layer_norm",
+    "le",
+    "lt",
+    "mm",
+    "mul",
+    "ne",
+    "neg",
+    "pow",
+    "relu",
+    "rms_norm",
+    "rotary_position_embedding",
+    "rsqrt",
+    "scaled_dot_product_attention",
+    "sigmoid",
+    "silu",
+    "sin",
+    "softmax",
+    "sub",
+    "tanh",
+]
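
Every name above is a kernel submodule (the same modules `src/ntops/torch.py` previously imported one by one), so importing the package now makes all 37 of them reachable through `ntops.kernels`. A short illustrative check (not from the commit):

import ntops

# Each __all__ entry is an already-imported submodule of ntops.kernels.
print(ntops.kernels.softmax)       # <module 'ntops.kernels.softmax' ...>
print(len(ntops.kernels.__all__))  # 37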

src/ntops/torch.py

Lines changed: 1 addition & 37 deletions
@@ -5,43 +5,7 @@
 import ninetoothed
 import torch
 
-import ntops.kernels.abs
-import ntops.kernels.add
-import ntops.kernels.addmm
-import ntops.kernels.bitwise_and
-import ntops.kernels.bitwise_not
-import ntops.kernels.bitwise_or
-import ntops.kernels.bmm
-import ntops.kernels.clamp
-import ntops.kernels.cos
-import ntops.kernels.div
-import ntops.kernels.dropout
-import ntops.kernels.eq
-import ntops.kernels.exp
-import ntops.kernels.ge
-import ntops.kernels.gelu
-import ntops.kernels.gt
-import ntops.kernels.isinf
-import ntops.kernels.isnan
-import ntops.kernels.layer_norm
-import ntops.kernels.le
-import ntops.kernels.lt
-import ntops.kernels.mm
-import ntops.kernels.mul
-import ntops.kernels.ne
-import ntops.kernels.neg
-import ntops.kernels.pow
-import ntops.kernels.relu
-import ntops.kernels.rms_norm
-import ntops.kernels.rotary_position_embedding
-import ntops.kernels.rsqrt
-import ntops.kernels.scaled_dot_product_attention
-import ntops.kernels.sigmoid
-import ntops.kernels.silu
-import ntops.kernels.sin
-import ntops.kernels.softmax
-import ntops.kernels.sub
-import ntops.kernels.tanh
+import ntops
 from ntops.kernels.scaled_dot_product_attention import CausalVariant
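
The 37 explicit imports become redundant because importing the package now pulls them in transitively: `import ntops` runs `src/ntops/__init__.py`, whose `from ntops import kernels, torch` executes `src/ntops/kernels/__init__.py` (importing every kernel submodule) before `src/ntops/torch.py` itself runs, so `ntops.kernels.*` is already bound by the time this file's body executes. A sketch of the resulting import chain (explanatory comments, not code from the commit):

import ntops  # triggers: ntops/__init__.py
              #   -> from ntops import kernels, torch
              #      -> kernels/__init__.py imports abs, add, ..., tanh
              #      -> torch.py runs last, with ntops.kernels fully populated

ntops.kernels.rms_norm  # resolvable without any per-module import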
