@@ -435,6 +435,9 @@
   {
     "name": "_caffe2::RoIAlign(Tensor features, Tensor rois, str order, float spatial_scale, int pooled_h, int pooled_w, int sampling_ratio, bool aligned, Tensor[]? _caffe2_preallocated_outputs=None) -> Tensor"
   },
+  {
+    "name": "aqlm::code2x8_lut_matmat.out(Tensor input, Tensor codes, Tensor codebooks, Tensor scales, Tensor? bias, Tensor(a!) out) -> Tensor(a!)"
+  },
   {
     "name": "aten::Bool.Tensor(Tensor a) -> bool"
   },
@@ -2562,7 +2565,8 @@
     "category": "Transform"
   },
   {
-    "name": "aten::embedding.out(Tensor weight, Tensor indices, SymInt padding_idx=-1, bool scale_grad_by_freq=False, bool sparse=False, *, Tensor(a!) out) -> Tensor(a!)"
+    "name": "aten::embedding.out(Tensor weight, Tensor indices, SymInt padding_idx=-1, bool scale_grad_by_freq=False, bool sparse=False, *, Tensor(a!) out) -> Tensor(a!)",
+    "category": "Transform"
   },
   {
     "name": "aten::embedding_bag(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq=False, int mode=0, bool sparse=False, Tensor? per_sample_weights=None, bool include_last_offset=False) -> (Tensor, Tensor, Tensor, Tensor)",
@@ -3115,7 +3119,8 @@
     "category": "Activation"
   },
   {
-    "name": "aten::gelu.out(Tensor self, *, str approximate=\"none\", Tensor(a!) out) -> Tensor(a!)"
+    "name": "aten::gelu.out(Tensor self, *, str approximate=\"none\", Tensor(a!) out) -> Tensor(a!)",
+    "category": "Activation"
   },
   {
     "name": "aten::gelu_(Tensor(a!) self, *, str approximate=\"none\") -> Tensor(a!)",
@@ -4518,7 +4523,8 @@
     "category": "Normalization"
   },
   {
-    "name": "aten::native_layer_norm.out(Tensor input, SymInt[] normalized_shape, Tensor? weight, Tensor? bias, float eps, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2) -> (Tensor(a!), Tensor(b!), Tensor(c!))"
+    "name": "aten::native_layer_norm.out(Tensor input, SymInt[] normalized_shape, Tensor? weight, Tensor? bias, float eps, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2) -> (Tensor(a!), Tensor(b!), Tensor(c!))",
+    "category": "Normalization"
   },
   {
     "name": "aten::ne.Tensor(Tensor self, Tensor other) -> Tensor"
@@ -5843,10 +5849,12 @@
     "category": "Tensor"
   },
   {
-    "name": "aten::split_with_sizes_copy(Tensor self, SymInt[] split_sizes, int dim=0) -> Tensor[]"
+    "name": "aten::split_with_sizes_copy(Tensor self, SymInt[] split_sizes, int dim=0) -> Tensor[]",
+    "category": "Tensor"
   },
   {
-    "name": "aten::split_with_sizes_copy.out(Tensor self, SymInt[] split_sizes, int dim=0, *, Tensor(a!)[] out) -> ()"
+    "name": "aten::split_with_sizes_copy.out(Tensor self, SymInt[] split_sizes, int dim=0, *, Tensor(a!)[] out) -> ()",
+    "category": "Tensor"
   },
   {
     "name": "aten::splitlines(str self, bool keepends=False) -> str[]"
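The recurring pattern in these hunks is filling in a "category" on out-variant overloads (e.g. aten::gelu.out) to match the category already assigned to the base operator. Below is a minimal sketch of a check that flags such gaps; the file name "pytorch-metadata.json" and the assumption that the file is a flat JSON array of {"name", "category"} objects are inferred from the hunks above, not stated in this diff.

import json

# Assumed file name and layout: a JSON array of {"name": "<schema>", "category": "<category>"} entries.
with open("pytorch-metadata.json", encoding="utf-8") as f:
    entries = json.load(f)

def op_name(schema):
    # '"aten::gelu.out(Tensor self, ...) -> ..."' -> 'aten::gelu.out'
    return schema.split("(", 1)[0]

# Collect the category recorded for each operator name.
categories = {}
for entry in entries:
    if "category" in entry:
        categories.setdefault(op_name(entry["name"]), entry["category"])

# Report overload entries (names with a '.suffix') that lack the category
# their base operator already has -- the kind of gap this diff fills.
for entry in entries:
    name = op_name(entry["name"])
    base = name.split(".", 1)[0]
    if base != name and "category" not in entry and base in categories:
        print(f"{name}: missing category {categories[base]!r} (from {base})")

Run against the pre-change file, a check like this would list aten::embedding.out, aten::gelu.out, aten::native_layer_norm.out, and the split_with_sizes_copy variants touched above.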