repo | file | code | file_length | avg_line_length | max_line_length | extension_type |
|---|---|---|---|---|---|---|
xformers | xformers-main/xformers/components/attention/sparsity_config.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
"""
The code has been adopted from DeepSpeed
(https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/ops/sparse_atte... | 41,608 | 50.179582 | 118 | py |
xformers | xformers-main/xformers/components/attention/linformer.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from typing import Optional
import torch
import torch.nn as nn
from xformers.compone... | 2,491 | 32.226667 | 99 | py |
xformers | xformers-main/xformers/components/attention/attention_patterns.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from typing import List
import numpy as np
import torch
from xformers.components.attention.sparsity_config... | 9,945 | 32.601351 | 104 | py |
xformers | xformers-main/xformers/components/attention/random.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from typing import Optional, Union
import torch
import torch.nn as nn
from xformers.... | 4,131 | 31.535433 | 118 | py |
xformers | xformers-main/xformers/components/attention/__init__.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
from pathlib import Path
from typing import Any, Callable, Dict, Set, Union
import torch
from xformers.u... | 3,983 | 28.731343 | 90 | py |
xformers | xformers-main/xformers/components/attention/ortho.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
from dataclasses import dataclass
from enum import Enum
from typing import Optional, Union
import torch
... | 12,119 | 36.292308 | 110 | py |
xformers | xformers-main/xformers/components/attention/visual.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass
import torch
import torch.nn as nn
from xformers.components.attention im... | 2,929 | 29.206186 | 104 | py |
xformers | xformers-main/xformers/components/attention/favor.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import math
from dataclasses import dataclass
from typing import Optional, Tuple
import torch
import torc... | 6,191 | 34.586207 | 115 | py |
xformers | xformers-main/xformers/components/attention/global_tokens.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from typing import Optional, Union
import torch
import torch.nn as nn
from xformers.... | 4,091 | 32.268293 | 118 | py |
xformers | xformers-main/xformers/components/attention/attention_mask.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Optional, Type, TypeVar
import torch
Self = TypeVar("Self", bound="AttentionMask")
class Attentio... | 4,585 | 30.847222 | 114 | py |
xformers | xformers-main/xformers/components/attention/blocksparse.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import math
from dataclasses import dataclass
import torch
from xformers import _is_triton_available
fr... | 6,725 | 34.21466 | 114 | py |
xformers | xformers-main/xformers/components/attention/fourier_mix.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
from torch.cuda.amp import autocast
from xformers.components.attention import Attention, AttentionConfig, r... | 1,181 | 31.833333 | 88 | py |
xformers | xformers-main/xformers/components/attention/feature_maps/base.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from abc import abstractmethod
from dataclasses import asdict, dataclass
from typing import Optional, Type, TypeVar
imp... | 1,672 | 25.983871 | 88 | py |
xformers | xformers-main/xformers/components/attention/feature_maps/softmax.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from enum import Enum, auto
from typing import Optional
import torch
from torch.autograd.profiler import re... | 10,598 | 35.67474 | 106 | py |
xformers | xformers-main/xformers/components/nvfuser/utils.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Dict, List, Optional
import torch.nn as nn
from xformers.components import Activation, Residual... | 1,141 | 28.282051 | 82 | py |
xformers | xformers-main/xformers/components/nvfuser/bias_dropout_res.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import functools
from typing import Optional
import torch
import torch.nn as nn
from functorch.compile import memory_ef... | 1,965 | 27.911765 | 96 | py |
xformers | xformers-main/xformers/components/nvfuser/__init__.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from xformers import _is_functorch_available
if _is_functorch_available: # noqa
try:
from .bias_act_dropou... | 724 | 31.954545 | 86 | py |
xformers | xformers-main/xformers/components/nvfuser/bias_act_dropout.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import functools
from typing import Optional
import torch
import torch.nn as nn
from functorch.compile import memory_ef... | 2,304 | 27.109756 | 96 | py |
xformers | xformers-main/xformers/components/nvfuser/bias_dropout_res_layernorm.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import functools
from typing import Optional
import torch
import torch.nn as nn
from functorch.compile import memory_ef... | 2,790 | 28.691489 | 96 | py |
xformers | xformers-main/xformers/profiler/device_limits.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass, field
from typing import Mapping, Tuple
import torch
@dataclass
class D... | 2,998 | 29.292929 | 147 | py |
xformers | xformers-main/xformers/profiler/api.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Optional, Sequence, Tuple
import torch.nn as nn
from .profiler import MemSnapshotsProfiler, Nsi... | 2,550 | 29.011765 | 92 | py |
xformers | xformers-main/xformers/profiler/slow_ops_profiler.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import json
import math
import os
from collections import defaultdict
from dataclasses import dataclass,... | 16,504 | 30.740385 | 90 | py |
xformers | xformers-main/xformers/profiler/profiler.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
import queue
import socket
import weakref
from dataclasses import dataclass
from typing import ... | 11,850 | 32.196078 | 89 | py |
xformers | xformers-main/xformers/ops/indexing.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Optional, Sequence
import torch
from .common import BaseOperator, get_xformers_operator, register_o... | 6,654 | 28.709821 | 92 | py |
xformers | xformers-main/xformers/ops/swiglu_op.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from typing import Dict, Optional, Sequence, Tuple, Union
import torch
import torch.nn... | 15,168 | 31.412393 | 131 | py |
xformers | xformers-main/xformers/ops/unbind.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Sequence, Tuple, Union
import torch
from .common import _get_storage_base
def get_... | 3,556 | 27.230159 | 83 | py |
xformers | xformers-main/xformers/ops/common.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import inspect
from typing import Any, Dict, List, Type, TypeVar
import torch
def get_operator(library: str, name: str... | 3,058 | 28.990196 | 116 | py |
xformers | xformers-main/xformers/ops/__init__.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
from .fmha import (
AttentionBias,
AttentionOp,
AttentionOpBase,
AttentionOpDispatch,
L... | 2,473 | 25.319149 | 71 | py |
xformers | xformers-main/xformers/ops/fmha/small_k.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, List, Mapping, Optional, Set, Tuple, Union
import torch
from ..common import get_xformers_opera... | 6,344 | 32.930481 | 84 | py |
xformers | xformers-main/xformers/ops/fmha/attn_bias.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass
from typing import Any, Iterable, List, Optional, Sequence, Tuple, Union
... | 22,605 | 35.46129 | 128 | py |
xformers | xformers-main/xformers/ops/fmha/flash.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import replace
from typing import Any, List, Optional, Set, Tuple
import torch
from ..common import _... | 12,739 | 29.478469 | 112 | py |
xformers | xformers-main/xformers/ops/fmha/common.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass
from typing import Any, List, Mapping, Optional, Set, Tuple, Type, Union
i... | 14,233 | 33.216346 | 116 | py |
xformers | xformers-main/xformers/ops/fmha/__init__.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Optional, Tuple, Type, Union
import torch
from . import cutlass, flash, small_k, triton
from .a... | 13,376 | 32.4425 | 119 | py |
xformers | xformers-main/xformers/ops/fmha/cutlass.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from enum import Enum
from typing import Any, List, Mapping, Optional, Set, Tuple, Union
import torch
from ..common im... | 13,621 | 32.717822 | 113 | py |
xformers | xformers-main/xformers/ops/fmha/triton.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import replace
from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple
import torch
from ..... | 6,242 | 35.086705 | 115 | py |
xformers | xformers-main/tests/test_block_factory.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
# Automatically fetch all registered attentions and Feedforwards
from xformers.components imp... | 13,791 | 32.153846 | 101 | py |
xformers | xformers-main/tests/test_custom_ops.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
# needed to register custom ops
import xformers # noqa: F401
import xformers.components.atte... | 14,325 | 29.480851 | 115 | py |
xformers | xformers-main/tests/test_attention_patterns.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import pytest
import torch
import xformers.components.attention.attention_patterns as AP
from xformers... | 15,826 | 32.25 | 115 | py |
xformers | xformers-main/tests/test_core_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn
from xformers import _is_triton_available
from xformers.components.atten... | 7,126 | 30.535398 | 84 | py |
xformers | xformers-main/tests/test_nystrom_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
from xformers.components.attention import NystromAttention, ScaledDotProduct
fr... | 5,006 | 31.72549 | 84 | py |
xformers | xformers-main/tests/test_pytorch_transformer_parity.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
from xformers import _is_triton_available
if _is_triton_available():
from... | 7,014 | 33.387255 | 87 | py |
xformers | xformers-main/tests/test_rotary_embeddings.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components.positional_embedding import RotaryEmbedding
from xformers.components... | 2,699 | 28.347826 | 88 | py |
xformers | xformers-main/tests/test_attentions.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
from typing import Tuple
import pytest
import torch
from xformers.components import (
InputProjection,
... | 15,665 | 29.960474 | 114 | py |
xformers | xformers-main/tests/test_residual.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components import NormalizationType, PreNorm
class Passthrough(torch.nn.Modu... | 946 | 26.057143 | 88 | py |
xformers | xformers-main/tests/test_triton_layernorm.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import pytest
import torch
from torch.cuda.amp.autocast_mode import autocast
import xformers
try:
... | 3,954 | 31.154472 | 86 | py |
xformers | xformers-main/tests/test_profiler.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch.utils._python_dispatch import TorchDispatchMode, _get_current_dispatch_mode
import... | 3,958 | 27.89781 | 86 | py |
xformers | xformers-main/tests/test_sparsecs.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components.attention import maybe_sparsify
from xformers.components.attention._... | 3,832 | 35.160377 | 87 | py |
xformers | xformers-main/tests/test_triton_basics.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
import xformers
SHAPES = [
(384, 128),
(8 * 384, 128),
(34, 128),
(16, 128)... | 5,315 | 28.04918 | 87 | py |
xformers | xformers-main/tests/test_triton_dropout.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import pytest
import torch
from torch.cuda.amp.autocast_mode import autocast
import xformers
from xform... | 6,533 | 31.346535 | 93 | py |
xformers | xformers-main/tests/test_pickling.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# CREDITS: Initially suggested by Jason Ramapuram, see
# https://github.com/facebookresearch/xformers/issues/203
import ... | 1,605 | 24.492063 | 71 | py |
xformers | xformers-main/tests/test_reversible.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
from xformers.factory.model_factory import xFormer, xFormerConfig
BATCH = 2
S... | 6,694 | 28.755556 | 110 | py |
xformers | xformers-main/tests/test_triton_blocksparse.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
import xformers
from xformers.components import MultiHeadDispatch
from xformers.components.at... | 8,565 | 32.330739 | 98 | py |
xformers | xformers-main/tests/test_model_factory.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from contextlib import nullcontext
import pytest
import torch
import xformers.factory.weight_init as xformers_weight_in... | 7,263 | 30.859649 | 97 | py |
xformers | xformers-main/tests/utils.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Optional
import numpy as np
import torch
def assert_allclose(
out: Optional[torch.Tensor],
... | 1,363 | 35.864865 | 94 | py |
xformers | xformers-main/tests/test_feedforward.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components import Activation
from xformers.components.feedforward import FEEDFO... | 2,932 | 31.230769 | 86 | py |
xformers | xformers-main/tests/test_timm_sparse.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
import xformers
try:
import timm
from timm.models.vision_transformer import VisionTr... | 2,134 | 30.865672 | 82 | py |
xformers | xformers-main/tests/test_indexing.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
import xformers.ops as xops
from .utils import assert_allclose
cuda_only = p... | 3,791 | 30.6 | 96 | py |
xformers | xformers-main/tests/test_ortho_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
from xformers.components.attention import OrthoFormerAttention, ScaledDotProduc... | 3,184 | 34.388889 | 87 | py |
xformers | xformers-main/tests/test_embedding.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components import PatchEmbeddingConfig, build_patch_embedding
from xformers.com... | 1,911 | 27.969697 | 85 | py |
xformers | xformers-main/tests/test_compositional_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components import MultiHeadDispatch
# Automatically test all the registered at... | 3,049 | 30.122449 | 88 | py |
xformers | xformers-main/tests/test_triton_fused_linear.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import pytest
import torch
from torch.cuda.amp.autocast_mode import autocast
import xformers
from xforme... | 6,406 | 34.010929 | 106 | py |
xformers | xformers-main/tests/test_sparse_tensors.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
# needed to register custom ops
import xformers # noqa: F401
from xformers.ops import masked... | 9,074 | 28.656863 | 111 | py |
xformers | xformers-main/tests/test_attention_mask.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from xformers.components.attention import AttentionMask
@pytest.mark.skipif(
not torch... | 2,301 | 36.129032 | 85 | py |
xformers | xformers-main/tests/test_hierarchical_transformer.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
from xformers.factory import xFormer, xFormerConfig
from xformers.helpers.hierarchical_configs import (
... | 1,802 | 25.514706 | 71 | py |
xformers | xformers-main/tests/test_favor.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
import pytest
import torch
from xformers.components.attention import FavorAttention, ScaledDotProduct
from ... | 5,116 | 29.458333 | 95 | py |
xformers | xformers-main/tests/test_attention_utils.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
from xformers.components.attention.utils import (
maybe_merge_masks,
reshape_key_padding_mask,
)
... | 1,259 | 29.731707 | 87 | py |
xformers | xformers-main/tests/test_triton_varargs.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import sys
import pytest
import torch
import xformers
try:
import triton
import triton.language... | 3,168 | 32.712766 | 88 | py |
xformers | xformers-main/tests/test_triton_softmax.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import logging
import pytest
import torch
from torch.cuda.amp.autocast_mode import autocast
import xformers
try:
f... | 5,170 | 30.919753 | 82 | py |
xformers | xformers-main/tests/test_swiglu.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import copy
import functools
import random
from contextlib import nullcontext
from typing import ContextManager, Optional... | 7,203 | 29.918455 | 120 | py |
xformers | xformers-main/tests/test_global_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
from xformers.components.attention import GlobalAttention, ScaledDotProduct
def test_global_attention():
... | 1,456 | 32.883721 | 102 | py |
xformers | xformers-main/tests/test_mem_eff_attention.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import math
import random
from typing import List, Optional, Sequence, Tuple, Type, TypeVar
import pytest
import torch
f... | 58,219 | 31.818489 | 146 | py |
xformers | xformers-main/tests/test_unbind.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import random
import pytest
import torch
import xformers.ops
from xformers.ops.common import _get_storage_base
@pytes... | 3,205 | 30.126214 | 83 | py |
xformers | xformers-main/docs/source/conf.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# type: ignore
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the... | 4,995 | 32.086093 | 80 | py |
xformers | xformers-main/packaging/build_conda.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import os
import shutil
import subprocess
from dataclasses import dataclass, field
from pathlib import Pat... | 7,574 | 33.121622 | 101 | py |
xformers | xformers-main/experimental/tests/test_triton_v2_qk_dotprod.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import time
import pytest
import torch
from ragged_inference.test_utils import assert_eq, bf16_support
from ragged_infe... | 3,938 | 25.979452 | 101 | py |
xformers | xformers-main/experimental/tests/test_seq_kv_cache.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import time
import pytest
import torch
from ragged_inference.garbage_pad_ragged_acts import RaggedActivations
from ragg... | 10,576 | 30.385757 | 106 | py |
xformers | xformers-main/experimental/tests/test_triton_v2_matmul.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from ragged_inference.test_utils import assert_eq, bf16_support
from ragged_inference.triton_... | 1,033 | 21.977778 | 86 | py |
xformers | xformers-main/experimental/tests/test_triton_v2_ragged_qk_dotprod.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import time
import pytest
import torch
from ragged_inference.garbage_pad_ragged_acts import RaggedActivations
from ragg... | 6,282 | 31.056122 | 134 | py |
xformers | xformers-main/experimental/ragged_inference/garbage_pad_ragged_acts.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import List
import numpy as np
import torch
import triton
import triton.language as tl
@triton.jit
def ga... | 6,217 | 31.554974 | 91 | py |
xformers | xformers-main/experimental/ragged_inference/triton_v2_qk_dotprod.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
import triton
import triton.language as tl
from triton.ops.matmul_perf_model import early_config_prune, est... | 6,453 | 28.47032 | 84 | py |
xformers | xformers-main/experimental/ragged_inference/triton_v2_matmul.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import torch
import triton
import triton.language as tl
from triton.ops.matmul_perf_model import early_config_prune, est... | 5,559 | 27.22335 | 87 | py |
xformers | xformers-main/experimental/ragged_inference/seq_kv_cache.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from functools import lru_cache
from typing import List, Tuple
import torch
from ragged_inference.garbage_pad_ragged_ac... | 4,700 | 27.840491 | 88 | py |
xformers | xformers-main/experimental/ragged_inference/triton_v2_ragged_qk_dotprod.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from typing import List, Optional
import torch
import triton
import triton.language as... | 12,921 | 35.196078 | 109 | py |
xformers | xformers-main/experimental/ragged_inference/test_utils.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Dict, Tuple
import numpy as np
import torch
_DTYPE_PRECISIONS = {
torch.float16: (1e-3, 1e... | 3,890 | 32.834783 | 88 | py |
xformers | xformers-main/stubs/torch_stub_tests.py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Tuple, TypeVar
import torch
import torch.nn as nn
from pyre_extensions import TypeVarTuple, Unpa... | 73,722 | 39.197928 | 94 | py |
a-vb-emotions | a-vb-emotions-main/losses.py | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from metrics import CCC
from end2you.utils import Params
from typing import List
from dataset import VOCAL_TYPES, DIMENSIONS, EMOTIONS, CULTURE_EMOTIONS
"""
Loss definitions
"""
class CCC_Loss(nn.Module):
"""
Computes CCC ... | 15,861 | 34.170732 | 195 | py |
a-vb-emotions | a-vb-emotions-main/metrics.py | """
Metrics definitions
"""
import torch
import torch.nn.functional as F
import numpy as np
from end2you.utils import Params
from sklearn.metrics import recall_score
from typing import Any, Dict
from dataset import VOCAL_TYPES, DIMENSIONS, CULTURES, EMOTIONS, CULTURE_EMOTIONS
def CCC(preds:torch.Tensor, targets:torc... | 5,997 | 31.775956 | 163 | py |
a-vb-emotions | a-vb-emotions-main/trainer.py | """
Trainer Class collecting components
"""
from asyncio import tasks
import torch
from torch.utils.tensorboard import SummaryWriter
import numpy as np
import pandas as pd
import random
from end2you.utils import Params
from pathlib import Path
import json
from shutil import copy
from copy import deepcopy
from typing i... | 20,608 | 34.410653 | 149 | py |
a-vb-emotions | a-vb-emotions-main/optimizer.py | """
Optimizer code
"""
from statistics import mode
import torch
import torch.nn as nn
from end2you.utils import Params
from model.models import AbstractModel
from losses import Criterion
def get_optimizer(train_params:Params, model:AbstractModel, criterion:Criterion) -> torch.optim.Optimizer:
opt_name = str(trai... | 1,804 | 26.769231 | 106 | py |
a-vb-emotions | a-vb-emotions-main/dataset/vocal_data.py | import os
import random
from functools import partial
import torch
import torch.nn.functional as F
import numpy as np
import pandas as pd
from end2you.utils import Params
from pathlib import Path
import torchaudio
from torch.utils.data import Dataset, DataLoader
from pytorch_lightning import LightningDataModule
import ... | 7,568 | 32.197368 | 144 | py |
a-vb-emotions | a-vb-emotions-main/dataset/data_augmentation.py | import torch
import random
"""
wav_augment audio transforms for data augmentation
"""
# Chain Runner here
class ChainRunner():
def __init__(self, chain) -> None:
self.chain = chain
def __call__(self, data):
src_info = {
"channels": data.size(0),
"length": data... | 949 | 17.269231 | 57 | py |
a-vb-emotions | a-vb-emotions-main/model/models.py | import enum
from sqlite3 import paramstyle
import torch
import torch.nn as nn
import torch.nn.functional as F
import transformers
from end2you.utils import Params
from model import WAV2VEC2_BASE_PATH, get_feature_dim
import dataset
"""
Model definitions
"""
def count_all_parameters(model:torch.nn.Module) -> int:
... | 51,381 | 38.014427 | 158 | py |
pytorch-mac-network | pytorch-mac-network-master/code/main.py | from __future__ import print_function
import torch
import argparse
import os
import random
import sys
import datetime
import dateutil
import dateutil.tz
import shutil
dir_path = (os.path.abspath(os.path.join(os.path.realpath(__file__), './.')))
sys.path.append(dir_path)
from config import cfg, cfg_from_file
from uti... | 1,981 | 28.58209 | 116 | py |
pytorch-mac-network | pytorch-mac-network-master/code/utils.py | import os
import errno
import numpy as np
import glob
import pickle
from copy import deepcopy
from config import cfg
from torch.nn import init
import torch
from torch.autograd import Variable
import torch.nn as nn
from torch.nn import functional as F
import torchvision.utils as vutils
def save_model(model, optim, i... | 3,169 | 31.020202 | 96 | py |
pytorch-mac-network | pytorch-mac-network-master/code/datasets.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
import torch.utils.data as data
import torchvision.transforms as transforms
from PIL import Image
import PIL
import os
import os.path
import pickle
import ra... | 1,770 | 27.111111 | 101 | py |
pytorch-mac-network | pytorch-mac-network-master/code/mac.py | import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Variable
from utils import *
def load_MAC(cfg, vocab):
kwargs = {'vocab': vocab,
'max_step': cfg.TRAIN.MAX_STEPS
}
model = MACNetwork(cfg, **kwargs)
model_ema = MACNetwork(cfg, **kwargs... | 10,457 | 33.976589 | 127 | py |
pytorch-mac-network | pytorch-mac-network-master/code/trainer.py | from __future__ import print_function
import sys
import os
import shutil
from six.moves import range
import pprint
from tqdm import tqdm
from tensorboardX import SummaryWriter
import torch.backends.cudnn as cudnn
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
from t... | 10,325 | 34.484536 | 124 | py |
enas | enas-master/src/common_ops.py | import numpy as np
import tensorflow as tf
def lstm(x, prev_c, prev_h, w):
ifog = tf.matmul(tf.concat([x, prev_h], axis=1), w)
i, f, o, g = tf.split(ifog, 4, axis=1)
i = tf.sigmoid(i)
f = tf.sigmoid(f)
o = tf.sigmoid(o)
g = tf.tanh(g)
next_c = i * g + f * prev_c
next_h = o * tf.tanh(next_c)
return n... | 1,100 | 27.973684 | 83 | py |
enas | enas-master/src/cifar10/image_ops.py | import numpy as np
import tensorflow as tf
from tensorflow.python.training import moving_averages
from src.common_ops import create_weight
from src.common_ops import create_bias
def drop_path(x, keep_prob):
"""Drops out a whole example hiddenstate with the specified probability."""
batch_size = tf.shape(x)[0]
... | 6,621 | 35.384615 | 84 | py |
FasterSeg | FasterSeg-master/tools/seg_opr/loss_opr.py | import torch
import torch.nn as nn
import torch.nn.functional as F
from engine.logger import get_logger
logger = get_logger()
class SigmoidFocalLoss(nn.Module):
def __init__(self, ignore_label, gamma=2.0, alpha=0.25,
reduction='mean'):
super(SigmoidFocalLoss, self).__init__()
se... | 3,584 | 37.138298 | 81 | py |