hf_text-generation-inference/server/Makefile-flash-att

# Flash-attention revision that build-flash-attention checks out
flash_att_commit := 06ece1a1525ebcf4e183ac76b1e5108d2872f57f

flash-attention:
	# Clone flash attention
	pip install packaging
	git clone https://github.com/OlivierDehaene/flash-attention.git

build-flash-attention: flash-attention
	# Check out the pinned revision, then build flash attention and the
	# rotary / layer_norm CUDA extensions
	cd flash-attention && git fetch && git checkout $(flash_att_commit)
	cd flash-attention && python setup.py build
	cd flash-attention/csrc/rotary && python setup.py build
	cd flash-attention/csrc/layer_norm && python setup.py build

install-flash-attention: build-flash-attention
	# Remove any previously installed copies, then reinstall flash attention and its extensions
	pip uninstall flash_attn rotary_emb dropout_layer_norm -y || true
	cd flash-attention && python setup.py install && cd csrc/layer_norm && python setup.py install && cd ../rotary && python setup.py install
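
# Usage sketch, assuming the file keeps the Makefile-flash-att name shown above
# and is invoked from the server/ directory:
#   make -f Makefile-flash-att install-flash-attention
# install-flash-attention depends on build-flash-attention, which depends on
# flash-attention, so the clone, build, and install steps run in order.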