{
  "repodata": {
    "build": "py310h214eb8d_1",
    "build_number": 1,
    "depends": [
      "__glibc >=2.28,<3.0.a0",
      "cuda-cudart >=12.8.90,<13.0a0",
      "cuda-version >=12.8,<13",
      "flash-attn 2.8.3 py310hdb63472_1",
      "libgcc >=14",
      "libstdcxx >=14",
      "libtorch >=2.10.0,<2.11.0a0",
      "python >=3.10,<3.11.0a0",
      "pytorch >=2.10.0,<2.11.0a0"
    ],
    "license": "BSD-3-Clause",
    "license_family": "BSD",
    "md5": "67e528e8530895d56c869829a20c7344",
    "name": "flash-attn-layer-norm",
    "sha256": "a89c020a77aceb609d347c4cea5d2cd188fcc88a2b661bcb7ec9eab517a3ada6",
    "size": 353012588,
    "subdir": "linux-64",
    "timestamp": 1777393812187,
    "version": "2.8.3"
  },
  "s3": "builds/ci/prefect/BivPm7AgQvCURTm3cEnh9Q/1777395329/linux-64/flash-attn-layer-norm-2.8.3-py310h214eb8d_1.tar.bz2",
  "signatures": {
    "7e3910a4b96ef2fe7242b10587a47039c8924fadf98a69503d63445e88b984b3": "ea3ed1ea6bf6e0b73fe9bbb9a525a0236cf893eeb0dba7b5e3cdd76e95af2af5565976610859beb6d20a12d5157fa3cc9b04f97e478ea8ca67c71a0a845a740a"
  }
}
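
The record above pairs conda-style package metadata (`md5`, `sha256`, `size` describe the tarball stored at the `s3` key) with a `signatures` map whose key is 32 bytes of hex and whose value is 64 bytes of hex, sizes consistent with an Ed25519 public key and detached signature. Below is a minimal verification sketch under that assumption; the canonicalization of the signed message (here: the `repodata` block as compact, key-sorted, UTF-8 JSON) is a guess at what the producing pipeline does, not something this file documents, and both function names are hypothetical.

```python
import hashlib
import json

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey


def verify_signatures(entry: dict) -> bool:
    """Check every signature in the entry against the repodata block.

    Assumption: the signed message is the canonical JSON form of
    entry["repodata"] (sorted keys, compact separators, UTF-8) and the
    scheme is Ed25519. The real pipeline may canonicalize differently.
    """
    message = json.dumps(
        entry["repodata"], sort_keys=True, separators=(",", ":")
    ).encode("utf-8")
    for pub_hex, sig_hex in entry["signatures"].items():
        key = Ed25519PublicKey.from_public_bytes(bytes.fromhex(pub_hex))
        try:
            key.verify(bytes.fromhex(sig_hex), message)
        except InvalidSignature:
            return False
    return True


def verify_artifact(tarball_path: str, entry: dict) -> bool:
    """Check a downloaded .tar.bz2 against the recorded size and digests."""
    with open(tarball_path, "rb") as f:
        data = f.read()
    meta = entry["repodata"]
    return (
        len(data) == meta["size"]
        and hashlib.md5(data).hexdigest() == meta["md5"]
        and hashlib.sha256(data).hexdigest() == meta["sha256"]
    )
```

A consumer would typically fetch the tarball from the `s3` key, run `verify_artifact` first (cheap, catches truncated or corrupted downloads), and only then `verify_signatures` against a pinned set of trusted public keys rather than trusting whatever key the entry itself carries.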