Cleanup.

parent 8d42e1d191
commit edc9ce9beb
@@ -28,9 +28,7 @@ from transformers.activations import ACT2FN

 from typing import Optional

 # Flash attention imports
-import rotary_emb
 import flash_attn_cuda
-import dropout_layer_norm

 from flash_attn.layers.rotary import RotaryEmbedding
 from text_generation_server.utils.layers import (

@@ -30,9 +30,7 @@ from transformers.models.gpt_neox import GPTNeoXConfig

 from typing import Optional

 # Flash attention imports
-import rotary_emb
 import flash_attn_cuda
-import dropout_layer_norm

 from flash_attn.layers.rotary import RotaryEmbedding
 from text_generation_server.utils.layers import (

@@ -1,6 +1,7 @@
 import torch

 from torch import nn
+import dropout_layer_norm

 HAS_BITS_AND_BYTES = True
 try:
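
The context lines of the last hunk show that the layers module guards optional CUDA extensions behind a try/except with a HAS_* flag: only "HAS_BITS_AND_BYTES = True" and "try:" are visible in the diff, while the unconditional "import dropout_layer_norm" is added above that guard. A minimal sketch of that optional-import pattern follows; the body of the try block is an assumption for illustration (the diff does not show it), and bitsandbytes is assumed to be the module the flag refers to.

    # Sketch only: reconstructs the head of the file after this commit.
    # The try-block body below is assumed; the hunk shows just the flag and "try:".
    import torch

    from torch import nn
    import dropout_layer_norm  # fused dropout + layer norm kernel, imported unconditionally

    HAS_BITS_AND_BYTES = True
    try:
        import bitsandbytes as bnb  # assumed: the optional dependency the flag tracks
    except ImportError:
        HAS_BITS_AND_BYTES = False  # downstream code can fall back when it is missing

This pattern lets the module load even when the optional kernel package is absent, at the cost of a runtime flag check wherever the accelerated path is used.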