{
  "repodata": {
    "build": "py310h86323b5_1",
    "build_number": 1,
    "depends": [
      "__glibc >=2.28,<3.0.a0",
      "cuda-cudart >=13.0.96,<14.0a0",
      "cuda-version >=13.0,<14",
      "flash-attn 2.8.3 py310hb4df0bb_1",
      "libgcc >=14",
      "libstdcxx >=14",
      "libtorch >=2.10.0,<2.11.0a0",
      "python >=3.10,<3.11.0a0",
      "pytorch >=2.10.0,<2.11.0a0"
    ],
    "license": "BSD-3-Clause",
    "license_family": "BSD",
    "md5": "3d1d56d9fb134472422f25e22b7e429d",
    "name": "flash-attn-layer-norm",
    "sha256": "ce1a1bed3ab61e5f3e3649272d0c45d4512a98068d09f8a60baa7ece0c93587d",
    "size": 438771029,
    "subdir": "linux-64",
    "timestamp": 1777360998127,
    "version": "2.8.3"
  },
  "s3": "builds/ci/prefect/ZV_RP1hqTx-bS4rvNaC9Vg/1777363763/linux-64/flash-attn-layer-norm-2.8.3-py310h86323b5_1.tar.bz2",
  "signatures": {
    "7e3910a4b96ef2fe7242b10587a47039c8924fadf98a69503d63445e88b984b3": "1a9f724673a2dcfe1c76576e25ac091cc45ea9cc0a6b825383634e687d2e9488ba84a7a70ac8fbdb08fd902e3c55afd964a51e54ce62d8c9a42c9b601767bd0d"
  }
}
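
For reference, a minimal sketch of how the "md5" and "sha256" fields above are typically used: after downloading the tarball named in the "s3" path, stream it through both digests and compare against the recorded values. This is an illustrative example, not tooling that ships with the record; the local filename and the verify_artifact helper are assumptions, and verifying the "signatures" entry (an ed25519 public key mapped to a hex signature) is out of scope here since the record does not state what bytes the signature covers.

    import hashlib

    # Hypothetical helper (not part of the record above): check a downloaded
    # tarball against the md5/sha256 digests stored in the repodata entry.
    def verify_artifact(path, expected_md5, expected_sha256):
        md5, sha = hashlib.md5(), hashlib.sha256()
        with open(path, "rb") as f:
            # Stream in 1 MiB chunks so a ~438 MB artifact is not held in memory.
            for chunk in iter(lambda: f.read(1 << 20), b""):
                md5.update(chunk)
                sha.update(chunk)
        return (md5.hexdigest() == expected_md5
                and sha.hexdigest() == expected_sha256)

    # Digests copied from the record; the local path is an assumed download location.
    ok = verify_artifact(
        "flash-attn-layer-norm-2.8.3-py310h86323b5_1.tar.bz2",
        "3d1d56d9fb134472422f25e22b7e429d",
        "ce1a1bed3ab61e5f3e3649272d0c45d4512a98068d09f8a60baa7ece0c93587d",
    )

Checking both digests mirrors what conda-style clients do with repodata entries: md5 for legacy compatibility, sha256 as the integrity check that actually matters.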