# Install FlashAttention from PyPI (MAX_JOBS caps parallel compile jobs to limit memory use)
MAX_JOBS=6 pip install flash-attn --use-pep517 --no-build-isolation
# Alternative: build the FlashAttention-3 (Hopper) kernels from source
# git clone https://github.com/Dao-AILab/flash-attention.git
# cd flash-attention/hopper && MAX_JOBS=4 python setup.py install
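# Optional sanity check (a minimal sketch, assuming the build succeeded and a CUDA-capable GPU is present):
python -c "import flash_attn; print(flash_attn.__version__)"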