Peter Hawkins 26632fd344 Replace disable_backends with enable_backends on jax_multiplatform_test.
Most users of disable_backends were actually using it to enable only a single backend, so it is simpler to negate the sense of the option and say that directly. Similarly, change disable_configs to enable_configs, with a default `None` value meaning "everything is enabled".

We change the relationship between enable_backends, disable_configs, enable_configs to be the following:
* `enable_backends` selects a set of initial test configurations to enable, based off backend only.
* `disable_configs` then prunes that set of test configurations, removing elements from the set.
* `enable_configs` then adds additional configurations to the set.

Fix code in jax/experimental/mosaic/gpu/examples not to depend on a Google-internal GPU support target.

PiperOrigin-RevId: 679563155
2024-09-27 06:15:31 -07:00

80 lines
2.1 KiB
Python

# Copyright 2024 The JAX Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load(
"//jaxlib:jax.bzl",
"jax_generate_backend_suites",
"jax_multiplatform_test",
"py_deps",
)
# Standard open-source license declaration (Apache 2.0, per the header above).
licenses(["notice"])

package(
    default_applicable_licenses = [],
    default_visibility = ["//visibility:private"],
)

# Generates the per-backend test_suite targets for the tests in this package.
jax_generate_backend_suites()
# Core Mosaic GPU tests. `enable_backends = []` starts from an empty set of
# test configurations; the H100 configurations (including a two-GPU variant
# for the multi-accelerator cases) are then opted in via `enable_configs`.
jax_multiplatform_test(
    name = "gpu_test",
    srcs = ["gpu_test.py"],
    enable_backends = [],
    enable_configs = [
        "gpu_h100",
        "gpu_h100_2gpu",
    ],
    shard_count = 4,
    tags = ["multiaccelerator"],
    deps = [
        "//jax:mosaic_gpu",
    ] + py_deps("absl/testing") + py_deps("numpy"),
)
# Tests for the Mosaic GPU matmul example kernel. Runs only on the single-GPU
# H100 configuration, opted in explicitly via `enable_configs`.
jax_multiplatform_test(
    name = "matmul_test",
    srcs = ["matmul_test.py"],
    enable_backends = [],
    enable_configs = ["gpu_h100"],
    shard_count = 5,
    deps = [
        "//jax:mosaic_gpu",
        "//jax/experimental/mosaic/gpu/examples:matmul",
    ] + py_deps("absl/testing") + py_deps("numpy") + py_deps("hypothesis"),
)
# Runs the flash-attention example itself as a test. The source lives in the
# examples package, so both `srcs` and `main` point at that file. Tagged
# "notap" to keep it out of automated continuous test runs.
jax_multiplatform_test(
    name = "flash_attention",
    srcs = ["//jax/experimental/mosaic/gpu/examples:flash_attention.py"],
    enable_backends = [],
    enable_configs = ["gpu_h100"],
    main = "//jax/experimental/mosaic/gpu/examples:flash_attention.py",
    tags = ["notap"],
    deps = [
        "//jax:mosaic_gpu",
    ] + py_deps("numpy"),
)
# Unit tests for the flash-attention example library, restricted to the
# single-GPU H100 configuration via `enable_configs`.
jax_multiplatform_test(
    name = "flash_attention_test",
    srcs = ["flash_attention_test.py"],
    enable_backends = [],
    enable_configs = ["gpu_h100"],
    deps = [
        "//jax:mosaic_gpu",
        "//jax/experimental/mosaic/gpu/examples:flash_attention",
    ] + py_deps("absl/testing"),
)