path: root/flash-attention.diff
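Patch description (free text above the first "diff --git" line is ignored by git apply and patch, so this summary does not affect application). The patch makes three changes to flash-attention's setup.py:

- It adds -DGLOG_USE_GLOG_EXPORT to both the host C++ ("cxx") and the nvcc flags. glog 0.7 changed its headers to require this macro from consuming code; presumably the headers are pulled in indirectly here (e.g. through the torch/caffe2 includes), so every translation unit needs the define.
- It adds "hopper" to the find_packages() exclusion list, keeping the hopper/ sources out of the installed package set.
- It drops "ninja" from setup_requires, so setuptools does not try to fetch ninja at build time; the build environment is expected to provide it.

As a minimal sketch (an assumed shape, not the patched file verbatim), the first hunk leaves the compile arguments looking like the following; in the real setup.py the nvcc list is additionally wrapped in append_nvcc_threads():

    # Sketch of the post-patch extra_compile_args: the GLOG_USE_GLOG_EXPORT
    # preprocessor define is passed to both the host compiler and nvcc.
    extra_compile_args = {
        "cxx": ["-O3", "-std=c++17", "-DGLOG_USE_GLOG_EXPORT"],
        "nvcc": [
            "-O3",
            "-std=c++17",
            "-DGLOG_USE_GLOG_EXPORT",
            # the -U__CUDA_NO_HALF*__ flags and remaining nvcc options follow
        ],
    }

After applying, one way to confirm the define reaches both compilers is to build with verbose output and grep the printed compiler invocations for GLOG_USE_GLOG_EXPORT.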
diff --git a/setup.py b/setup.py
index 264b0ee..07945fc 100644
--- a/setup.py
+++ b/setup.py
@@ -285,11 +285,12 @@ if not SKIP_CUDA_BUILD and not IS_ROCM:
                 "csrc/flash_attn/src/flash_fwd_split_hdim256_bf16_causal_sm80.cu",
             ],
             extra_compile_args={
-                "cxx": ["-O3", "-std=c++17"],
+                "cxx": ["-O3", "-std=c++17", "-DGLOG_USE_GLOG_EXPORT"],
                 "nvcc": append_nvcc_threads(
                     [
                         "-O3",
                         "-std=c++17",
+                        "-DGLOG_USE_GLOG_EXPORT",
                         "-U__CUDA_NO_HALF_OPERATORS__",
                         "-U__CUDA_NO_HALF_CONVERSIONS__",
                         "-U__CUDA_NO_HALF2_OPERATORS__",
@@ -536,6 +537,7 @@ setup(
             "docs",
             "benchmarks",
             "flash_attn.egg-info",
+            "hopper",
         )
     ),
     author="Tri Dao",
@@ -563,6 +565,5 @@ setup(
     setup_requires=[
         "packaging",
         "psutil",
-        "ninja",
     ],
 )