I need to write a rule which zips up all transitive config files (*.foo) of an executable (which can be a custom rule, a java_binary, or a docker container_image).
The config files can appear in the srcs attribute of any transitive dependency of the executable (reached via tars, deps, runtime_deps, etc.).
This sounds like it should be rather easy to do with an aspect attached to my rule, but I lost my way between the various examples.
sources_aspect.bzl:
SourceFiles = provider(
    fields = {
        "transitive_source_files": "list of transitive source files of a target",
    },
)

# Add 'resources'? If so, _accumulate_transitive_source_files needs to check,
# for each dep in deps, whether SourceFiles is in dep.
SourceAttr = ["tars", "deps", "runtime_deps", "exports"]

def _accumulate_transitive_source_files(accumulated, deps):
    return depset(
        transitive = [dep[SourceFiles].transitive_source_files for dep in deps] + [accumulated],
    )

def _collect_current_source_files(srcs):
    return [file for src in srcs for file in src.files.to_list()]

def _collect_source_files_aspect_impl(target, ctx):
    current_source_files = []
    if hasattr(ctx.rule.attr, "srcs"):
        current_source_files = _collect_current_source_files(ctx.rule.attr.srcs)
    if hasattr(ctx.rule.attr, "resources"):
        current_source_files = current_source_files + _collect_current_source_files(ctx.rule.attr.resources)

    accumulated_source_files = depset(current_source_files)
    for attr in SourceAttr:
        if hasattr(ctx.rule.attr, attr):
            accumulated_source_files = _accumulate_transitive_source_files(accumulated_source_files, getattr(ctx.rule.attr, attr))

    return [SourceFiles(transitive_source_files = accumulated_source_files)]

collect_source_files_aspect = aspect(
    implementation = _collect_source_files_aspect_impl,
    attr_aspects = SourceAttr,
)
sources.bzl:
load("//sources/src/main:sources_aspect.bzl", "SourceFiles", "collect_source_files_aspect")
def _owner_to_bazel_file(fileLabel):
workspace = fileLabel.workspace_root
package = fileLabel.package
if 0 < len(workspace):
workspace = workspace + "/"
if 0 < len(package):
package = package + "/"
return workspace + package + "BUILD.bazel"
def _collect_source_files_rule_impl(ctx):
metadata = [ctx.attr.group_id, ctx.attr.artifact_id]
paths = sorted([f.path for f in ctx.attr.main_artifact_name[SourceFiles].transitive_source_files.to_list()])
owners = sorted(depset([_owner_to_bazel_file(f.owner) for f in ctx.attr.main_artifact_name[SourceFiles].transitive_source_files.to_list()] + [_owner_to_bazel_file(ctx.label)]).to_list())
ctx.actions.write(ctx.outputs.sources, "\n".join(metadata + paths + owners))
ctx.actions.write(ctx.outputs.source_files, "{\"groupId\": \"%s\", \"artifactId\": \"%s\", \"sources\": %s, \"buildFiles\": %s}" % (ctx.attr.group_id, ctx.attr.artifact_id, paths, owners))
return DefaultInfo(
runfiles = ctx.runfiles(files = [ctx.outputs.sources, ctx.outputs.source_files]),
)
source_files = rule(
implementation = _collect_source_files_rule_impl,
attrs = {
"main_artifact_name": attr.label(aspects = [collect_source_files_aspect]),
"group_id": attr.string(mandatory = True),
"artifact_id": attr.string(mandatory = True),
},
outputs = {"sources": "%{name}.sources.txt", "source_files": "%{name}.sources.json"},
)
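For the zipping step itself, here is a minimal, untested sketch of a rule that filters the aspect-collected files down to *.foo and packages them; the @bazel_tools//tools/zip:zipper helper and all names below are assumptions layered on top of the aspect above, not something the code above already provides:
# zip_config_files.bzl -- hypothetical sketch building on the aspect above.
load("//sources/src/main:sources_aspect.bzl", "SourceFiles", "collect_source_files_aspect")

def _zip_config_files_impl(ctx):
    # Keep only the *.foo files out of everything the aspect collected.
    config_files = [
        f
        for f in ctx.attr.main_artifact_name[SourceFiles].transitive_source_files.to_list()
        if f.extension == "foo"
    ]
    out = ctx.actions.declare_file(ctx.label.name + ".zip")
    args = ctx.actions.args()
    args.add("c", out)  # "c" = create archive
    args.add_all(config_files)
    ctx.actions.run(
        outputs = [out],
        inputs = config_files,
        executable = ctx.executable._zipper,
        arguments = [args],
    )
    return [DefaultInfo(files = depset([out]))]

zip_config_files = rule(
    implementation = _zip_config_files_impl,
    attrs = {
        "main_artifact_name": attr.label(aspects = [collect_source_files_aspect]),
        "_zipper": attr.label(
            default = "@bazel_tools//tools/zip:zipper",
            cfg = "exec",
            executable = True,
        ),
    },
)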
Say I have something like the following.
def _foo_aspect_impl(target, ctx):
    # operations
    return [FooInfo(...)]

foo_aspect = aspect(
    implementation = _foo_aspect_impl,
    attr_aspects = ["deps"],
    attrs = dict(
        _tool = attr.label(
            # defs
        ),
    ),
)

def _foo_rule_impl(ctx):
    for dep in ctx.attr.deps:
        # do something with `dep[FooInfo]`
        pass
    return DefaultInfo(...)

foo_rule = rule(
    implementation = _foo_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [foo_aspect],
        ),
    },
)
Is there a way to change the value of foo_aspect's _tool attribute, either in the WORKSPACE or at the invocation of foo_rule? The former is much preferable.
The use case is that the version and repository origin of _tool might change from project to project. When the aspect resides in a repository shared by two projects, it does not make sense to create two branches of that repository just for the versioning of _tool.
After a lot of head scratching I found a rather complicated way of doing it.
Since the only thing that seems to be configurable in WORKSPACE.bazel during the loading phase is other workspaces / repositories, one can actually use target aliasing together with repository loading to multiplex configurable targets.
Here is how it works.
First, define a new repository rule new_virtual_repository, which creates repositories that do nothing but load the given BUILD.bazel and WORKSPACE.bazel files.
# repo.bzl
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "workspace_and_buildfile")

def _new_virtual_repo_impl(ctx):
    # Create build file
    workspace_and_buildfile(ctx)
    return ctx.attr

new_virtual_repository = repository_rule(
    implementation = _new_virtual_repo_impl,
    attrs = dict(
        build_file = attr.label(allow_single_file = True),
        build_file_content = attr.string(),
        workspace_file = attr.label(allow_single_file = True),
        workspace_file_content = attr.string(),
    ),
    local = True,
)
Then, create an extension file config.bzl which implements a function that generates the BUILD.bazel file and loads the virtual repository:
# config.bzl
load(":repo.bzl", "new_virtual_repository")

def config(tool):
    build_file_content = """
alias(
    name = "tool",
    actual = "%s",
)
""" % (tool)
    new_virtual_repository(
        name = "config_repo",
        build_file_content = build_file_content,
    )
Now in the aspect specification:
# aspect.bzl
foo_aspect = aspect(
    ...
    attrs = dict(
        _tool = attr.label(default = "@config_repo//:tool"),
    ),
)
Finally, configure the actual tool in WORKSPACE.bazel:
# WORKSPACE.bazel
load("//:config.bzl", "config")
config(tool="<actual_tool_label>")
I'm writing some rules and learning Starlark as I progress.
Assume I have my own provider:
ModularResources = provider(
    doc = "Modular resources",
    fields = {
        "artifactId": "Former Maven artifact id (don't ask me why)",
        "srcs": "List of labels (a glob(..) thing)",
    },
)

def _modular_resources_impl(ctx):
    return ModularResources(
        artifactId = ctx.attr.artifactId,
        srcs = ctx.attr.srcs,
    )

modular_resources = rule(
    implementation = _modular_resources_impl,
    attrs = {
        "artifactId": attr.string(
            mandatory = True,
        ),
        "srcs": attr.label_list(
            allow_files = True,
            mandatory = True,
        ),
    },
)
Then I have a generator rule which requires these:
some_generator = rule(
    attrs = {
        "deps": attr.label_list(
            providers = [ModularResources],
        ),
        ...
    },
    ...
)
In my implementation I discovered that I need to do a couple of unwraps to get the files:
def _get_files(deps):
    result = []
    for dep in deps:
        for target in dep[ModularResources].srcs:
            result += target.files.to_list()
    return result
Is there a more efficient way to perform the collection?
As to why I'm doing this, the generator actually needs a special list of files like this:
def _format_files(deps):
    formatted = ""
    for dep in deps:
        for target in dep[ModularResources].srcs:
            formatted += ",".join([dep[ModularResources].artifactId + ":" + f.path for f in target.files.to_list()])
    return formatted
FWIW, here is an example of how this is used:
a/BUILD:
modular_resources(
    name = "generator_resources",
    srcs = glob(
        ["some/path/**/*.whatever"],
    ),
    artifactId = "a",
    visibility = ["//visibility:public"],
)
b/BUILD:
some_generator(
    name = "...",
    deps = [
        "//a:generator_resources",
    ],
)
If you want to trade memory for better performance, maybe the operation can more easily be parallelised by blaze if it's done in the provider instead:
def _modular_resources_impl(ctx):
    return ModularResources(
        artifactId = ctx.attr.artifactId,
        formatted_srcs = ",".join([ctx.attr.artifactId + ":" + f.path for f in ctx.files.srcs]),
    )
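If the main cost is the repeated flattening, another option (a sketch under the assumption that the provider fields can be reshaped) is to store a depset in the provider and merge depsets transitively, flattening only once at the point of use:
ModularResources = provider(
    doc = "Modular resources",
    fields = {
        "artifactId": "Former Maven artifact id",
        "files": "depset of resource files",
    },
)

def _modular_resources_impl(ctx):
    return [ModularResources(
        artifactId = ctx.attr.artifactId,
        # ctx.files.srcs is the already-flattened list of files behind the srcs labels.
        files = depset(ctx.files.srcs),
    )]

def _get_files(deps):
    # Merge without flattening; call .to_list() once, where the list is actually needed.
    return depset(transitive = [dep[ModularResources].files for dep in deps])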
Say I have a custom rule, my_object. It looks like:
my_object(
    name = "foo",
    deps = [
        "//services/image-A:push",
        "//services/image-B:push",
    ],
)
Where the labels in deps are rules_docker's container_push rules.
I want to be able to bazel run //:foo and have it push the Docker images within the deps list. How do I do this?
This seems to be a specific case of just generally wanting to run the executables of other rules within the executable of a custom rule.
The thing to do here is to have my_object output an executable that executes the other executables.
Consider this example:
def _impl1(ctx):
    ctx.actions.write(
        output = ctx.outputs.executable,
        is_executable = True,
        content = "echo %s 123" % ctx.label.name,
    )
    return DefaultInfo(executable = ctx.outputs.executable)

exec_rule1 = rule(
    implementation = _impl1,
    executable = True,
)

def _impl2(ctx):
    executable_paths = []
    runfiles = ctx.runfiles()
    for dep in ctx.attr.deps:
        # the "./" is needed if the executable is in the current directory
        # (i.e. in the workspace root)
        executable_paths.append("./" + dep.files_to_run.executable.short_path)
        # collect the runfiles of the other executables so their own runfiles
        # will be available when the top-level executable runs
        runfiles = runfiles.merge(dep.default_runfiles)
    ctx.actions.write(
        output = ctx.outputs.executable,
        is_executable = True,
        content = "\n".join(executable_paths),
    )
    return DefaultInfo(
        executable = ctx.outputs.executable,
        runfiles = runfiles,
    )

exec_rule2 = rule(
    implementation = _impl2,
    executable = True,
    attrs = {
        "deps": attr.label_list(),
    },
)
BUILD.bazel:
load(":defs.bzl", "exec_rule1", "exec_rule2")
exec_rule1(name = "foo")
exec_rule1(name = "bar")
exec_rule2(name = "baz", deps = [":foo", ":bar"])
and then running it:
$ bazel run //:baz
INFO: Analyzed target //:baz (4 packages loaded, 19 targets configured).
INFO: Found 1 target...
Target //:baz up-to-date:
bazel-bin/baz
INFO: Elapsed time: 0.211s, Critical Path: 0.01s
INFO: 0 processes.
INFO: Build completed successfully, 6 total actions
INFO: Build completed successfully, 6 total actions
foo 123
bar 123
I managed to achieve this by returning DefaultInfo from the rule.
def build_all_impl(ctx):
    targets = ctx.attr.targets
    run_files = []
    for target in targets:
        run_files = run_files + target.files.to_list()
    return DefaultInfo(
        runfiles = ctx.runfiles(run_files),
    )

build_all = rule(
    implementation = build_all_impl,
    attrs = {
        "targets": attr.label_list(
            doc = "target to build",
        ),
    },
)
And then by invoking the build_all rule:
build_all(
    name = "all",
    targets = [
        ":target-1",
        ":target-2",
        ...
    ],
)
I am running into a build error for a Bazel target. I have checked the code and I could not find anything wrong. I suspect I might be looking at the wrong version of the code. Is there a way to print out the tags/versions/hashes of all the code packages a target depends on?
bazel query 'deps(//my:target)' --nohost_deps --noimplicit_deps --output=build
This will print out, in BUILD file format, the rules for all explicit dependencies of the target. Here's example output from running that command in a real project:
# /home/user/code/rules_jvm_external/tests/integration/BUILD:12:1
java_test(
  name = "GlobalArtifactExclusionsTest",
  deps = ["@global_exclusion_testing//:com_diffplug_durian_durian_core", "@global_exclusion_testing//:com_google_guava_guava", "@global_exclusion_testing//:com_squareup_okhttp3_okhttp", "@maven//:org_hamcrest_hamcrest", "@maven//:org_hamcrest_hamcrest_core"],
  srcs = ["//tests/integration:GlobalArtifactExclusionsTest.java"],
  test_class = "com.jvm.external.GlobalArtifactExclusionsTest",
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/maven/BUILD:103:1
jvm_import(
  name = "org_hamcrest_hamcrest_core",
  tags = ["maven_coordinates=org.hamcrest:hamcrest-core:2.1"],
  jars = ["@maven//:v1/https/jcenter.bintray.com/org/hamcrest/hamcrest-core/2.1/hamcrest-core-2.1.jar"],
  deps = ["@maven//:org_hamcrest_hamcrest"],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/maven/BUILD:115:1
jvm_import(
  name = "org_hamcrest_hamcrest",
  tags = ["maven_coordinates=org.hamcrest:hamcrest:2.1"],
  jars = ["@maven//:v1/https/jcenter.bintray.com/org/hamcrest/hamcrest/2.1/hamcrest-2.1.jar"],
  deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:79:1
jvm_import(
  name = "com_squareup_okhttp3_okhttp",
  tags = ["maven_coordinates=com.squareup.okhttp3:okhttp:3.14.1"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.14.1/okhttp-3.14.1.jar"],
  deps = ["@global_exclusion_testing//:com_squareup_okio_okio"],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:91:1
jvm_import(
  name = "com_squareup_okio_okio",
  tags = ["maven_coordinates=com.squareup.okio:okio:1.17.2"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/squareup/okio/okio/1.17.2/okio-1.17.2.jar"],
  deps = [],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:52:1
jvm_import(
  name = "com_google_guava_guava",
  tags = ["maven_coordinates=com.google.guava:guava:27.0-jre"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar"],
  deps = ["@global_exclusion_testing//:com_google_guava_listenablefuture", "@global_exclusion_testing//:com_google_code_findbugs_jsr305", "@global_exclusion_testing//:com_google_guava_failureaccess", "@global_exclusion_testing//:com_google_errorprone_error_prone_annotations", "@global_exclusion_testing//:org_checkerframework_checker_qual"],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:102:1
jvm_import(
  name = "org_checkerframework_checker_qual",
  tags = ["maven_coordinates=org.checkerframework:checker-qual:2.5.2"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar"],
  deps = [],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:68:1
jvm_import(
  name = "com_google_guava_listenablefuture",
  tags = ["maven_coordinates=com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar"],
  deps = [],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:41:1
jvm_import(
  name = "com_google_guava_failureaccess",
  tags = ["maven_coordinates=com.google.guava:failureaccess:1.0"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar"],
  deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:30:1
jvm_import(
  name = "com_google_errorprone_error_prone_annotations",
  tags = ["maven_coordinates=com.google.errorprone:error_prone_annotations:2.2.0"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar"],
  deps = [],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:19:1
jvm_import(
  name = "com_google_code_findbugs_jsr305",
  tags = ["maven_coordinates=com.google.code.findbugs:jsr305:3.0.2"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar"],
  deps = [],
)

# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:8:1
jvm_import(
  name = "com_diffplug_durian_durian_core",
  tags = ["maven_coordinates=com.diffplug.durian:durian-core:1.2.0"],
  jars = ["@global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/diffplug/durian/durian-core/1.2.0/durian-core-1.2.0.jar"],
  deps = [],
)
I'm trying to create a macro in Bazel that wraps java_test so it runs TestNG; however, I'm running into trouble passing TestNG the filename.
So far I have
load("#bazel_skylib//:lib.bzl", "paths")
def java_testng(file, deps=[], **kwargs):
native.java_test(
name = paths.split_extension(file)[0],
srcs = [file],
use_testrunner=False,
main_class='org.testng.TestNG',
deps = [
"//third_party:org_testng_testng"
] + deps,
args=[file],
**kwargs
)
However, the file passed via args turns out to be a non-existent runfile at test time.
Help appreciated on the correct value for args.
Here is a sample usage I would like:
java_testng(
    file = "SomeFakeTest.java",
    deps = [
        "//:resources",
        "//third_party:com_fasterxml_jackson_core_jackson_databind",
        "//third_party:org_assertj_assertj_core",
    ],
)
Here is the solution I came up with
load("#bazel_skylib//:lib.bzl", "paths")
def java_testng(file, deps=[], size="small", **kwargs):
native.java_library(
name = paths.split_extension(file)[0] + "-lib",
deps = [
"//third_party:org_testng_testng"
] + deps,
srcs = [file]
)
native.java_test(
name = paths.split_extension(file)[0],
use_testrunner=False,
main_class='org.testng.TestNG',
runtime_deps = [
"//third_party:org_testng_testng",
paths.split_extension(file)[0] + "-lib"
],
data = [file],
size = size,
args=["-testclass $(location " + file + ")"],
**kwargs
)
I don't know why you used a macro; I managed to call TestNG without one.
See my solution below:
I create my program jar (using some Annotation Processor)
I create my test jar (using some Annotation Processor)
I call testng via java_test().
The only thing I didn't figure out: how to avoid hardcoding "libmy-model-test-lib.jar".
java_library(
    name = "my-model",
    srcs = glob(["src/main/java/**/*.java"]),
    resources = glob(["src/main/resources/**"]),
    deps = [
        "@commons_logging_jar//jar",
        ":lombok",
        ":mysema_query",
        ...
    ],
)
java_library(
    name = "my-model-test-lib",
    srcs = glob(["src/test/java/**/*.java"]),
    deps = [
        "@org_hamcrest_core_jar//jar",
        "@commons_logging_jar//jar",
        ":lombok",
        ":mysema_query",
        ...
        "@assertj_jar//jar",
        "@mockito_jar//jar",
        "@testng_jar//jar",
    ],
)
java_test(
    name = "AllTests",
    size = "small",
    runtime_deps = [
        ":my-model-test-lib",
        ":my-model",
        "@org_jboss_logging_jar//jar",
        "@org_objenesis_jar//jar",
        "@com_beust_jcommander//jar",
    ],
    use_testrunner = False,
    main_class = "org.testng.TestNG",
    args = ["-testjar", "libmy-model-test-lib.jar", "-verbose", "2"],
)
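One possible way around the hardcoded jar name (an untested sketch; it assumes $(location) expansion resolves the java_library target to its single class jar) is to let Bazel expand the label of the test library in args:
java_test(
    name = "AllTests",
    size = "small",
    runtime_deps = [
        ":my-model-test-lib",
        ":my-model",
        "@org_jboss_logging_jar//jar",
        "@org_objenesis_jar//jar",
        "@com_beust_jcommander//jar",
    ],
    # Listing the library in data makes its jar available for $(location) expansion below.
    data = [":my-model-test-lib"],
    use_testrunner = False,
    main_class = "org.testng.TestNG",
    args = ["-testjar", "$(location :my-model-test-lib)", "-verbose", "2"],
)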
java_plugin(
    name = "lombok_plugin",
    processor_class = "lombok.launch.AnnotationProcessorHider$AnnotationProcessor",
    deps = ["@lombok_jar//jar"],
)

java_library(
    name = "lombok",
    exports = ["@lombok_jar//jar"],
    exported_plugins = [":lombok_plugin"],
)

java_plugin(
    name = "mysema_query_plugin",
    processor_class = "com.mysema.query.apt.jpa.JPAAnnotationProcessor",
    deps = [
        "@querydsl_apt_jar//jar",
        "@mysema_codegen_jar//jar",
        "@javax_persistence_jar//jar",
        "@querydsl_codegen_jar//jar",
        "@guava_jar//jar",
        "@querydsl_core_jar//jar",
        "@javax_inject_jar//jar",
    ],
)

java_library(
    name = "mysema_query",
    exports = ["@querydsl_apt_jar//jar"],
    exported_plugins = [":mysema_query_plugin"],
)

java_plugin(
    name = "mockito_plugin",
    processor_class = "",
    deps = ["@mockito_jar//jar"],
)

java_library(
    name = "mockito",
    exports = ["@mockito_jar//jar"],
    exported_plugins = [":mockito_plugin"],
)