This commit is contained in:
Nicolas Patry 2023-05-15 15:21:49 +02:00
parent 8d42e1d191
commit edc9ce9beb
3 changed files with 1 additions and 4 deletions

View File

@@ -28,9 +28,7 @@ from transformers.activations import ACT2FN
 from typing import Optional
 # Flash attention imports
-import rotary_emb
 import flash_attn_cuda
-import dropout_layer_norm
 from flash_attn.layers.rotary import RotaryEmbedding
 from text_generation_server.utils.layers import (

View File

@@ -30,9 +30,7 @@ from transformers.models.gpt_neox import GPTNeoXConfig
 from typing import Optional
 # Flash attention imports
-import rotary_emb
 import flash_attn_cuda
-import dropout_layer_norm
 from flash_attn.layers.rotary import RotaryEmbedding
 from text_generation_server.utils.layers import (

View File

@@ -1,6 +1,7 @@
 import torch
 from torch import nn
+import dropout_layer_norm
 HAS_BITS_AND_BYTES = True
 try: