Is there a way to change a private attribute of an Aspect programmatically? - bazel

Say I have something like the following.
def _foo_aspect_impl(target, ctx):
    # operations
    return FooInfo(...)

foo_aspect = aspect(
    implementation = _foo_aspect_impl,
    attr_aspects = ['deps'],
    attrs = dict(
        _tool = attr.label(
            # defs
        ),
    ),
)
def _foo_rule_impl(ctx):
    for dep in ctx.attr.deps:
        # do something with `dep[FooInfo]`
        pass
    return DefaultInfo(...)

foo_rule = rule(
    implementation = _foo_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [foo_aspect],
        ),
    },
)
Is there a way to change the value of foo_aspect.attr._tool, either in WORKSPACE or at the invocation of foo_rule? The former is much preferable.
The use case is that the version and repository origin of _tool may change from project to project. When the aspect resides in a repository shared by two projects, it does not make sense to maintain two branches of that repository just for the versioning of _tool.

After a lot of head scratching, I found a rather complicated way of doing it.
Since the only thing that seems to be configurable in WORKSPACE.bazel during the loading phase is other workspaces / repositories, one can use target aliasing together with repository loading to multiplex configurable targets.
Here is how it works.
First, define a new repository rule new_virtual_repository, which creates repositories that do nothing but load the given BUILD.bazel and WORKSPACE.bazel files.
# repo.bzl
load("#bazel_tools//tools/build_defs/repo:utils.bzl", "workspace_and_buildfile")
def _new_virtual_repo_impl(ctx):
# Create build file
workspace_and_buildfile(ctx)
return ctx.attr
new_virtual_repository = repository_rule(
implementation = _new_virtual_repo_impl,
attrs = dict(
build_file = attr.label(allow_single_file = True),
build_file_content = attr.string(),
workspace_file = attr.label(allow_single_file = True),
workspace_file_content = attr.string(),
),
local = True,
)
Then, create an extension file config.bzl which implements a function that generates the BUILD.bazel file and loads the virtual repository:
# config.bzl
load(":repo.bzl", "new_virtual_repository")
def config(tool):
build_file_content = """
alias(
name = "tool",
actual = "%s",
""" % (tool)
new_virtual_repository(
name = "config_repo",
build_file_content = build_file_content,
)
Now in the aspect specification:
# aspect.bzl
foo_aspect = aspect(
    ...
    attrs = dict(
        _tool = attr.label(default = "@config_repo//:tool"),
    ),
)
Finally, configure the actual tool in WORKSPACE.bazel:
# WORKSPACE.bazel
load("//:config.bzl", "config")
config(tool="<actual_tool_label>")
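Each project can then pin its own tool in its own WORKSPACE.bazel without branching the shared repository. A minimal sketch, assuming the shared definitions are available as @shared_defs and the tool lives at @tools_v2//:foo_tool (both names hypothetical):
# second project's WORKSPACE.bazel
load("@shared_defs//:config.bzl", "config")

config(tool = "@tools_v2//:foo_tool")  # hypothetical tool label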

Related

What is the most efficient way to extract/collect files from a list of targets/providers in Bazel?

I'm writing some rules and learning Starlark as I progress.
Assume I have my own provider:
ModularResources = provider(
    doc = "Modular resources",
    fields = {
        "artifactId": "Former Maven artifact id (don't ask me why)",
        "srcs": "List of labels (a glob(..) thing)",
    },
)

def _modular_resources_impl(ctx):
    return ModularResources(
        artifactId = ctx.attr.artifactId,
        srcs = ctx.attr.srcs,
    )

modular_resources = rule(
    implementation = _modular_resources_impl,
    attrs = {
        "artifactId": attr.string(
            mandatory = True,
        ),
        "srcs": attr.label_list(
            allow_files = True,
            mandatory = True,
        ),
    },
)
Then I have a generator rule which requires these:
some_generator = rule(
    attrs = {
        "deps": attr.label_list(
            providers = [ModularResources],
        ),
        ...
    },
    ...
)
In my implementation I discovered that I need to do a couple of unwraps to get the files:
def _get_files(deps):
    result = []
    for dep in deps:
        for target in dep[ModularResources].srcs:
            result += target.files.to_list()
    return result
Is there a more efficient way to perform the collection?
As to why I'm doing this, the generator actually needs a special list of files like this:
def _format_files(deps):
    formatted = ""
    for dep in deps:
        for target in dep[ModularResources].srcs:
            formatted += ",".join([dep[ModularResources].artifactId + ":" + f.path for f in target.files.to_list()])
    return formatted
FWIW, here is an example of how this is used:
a/BUILD:
modular_resources(
    name = "generator_resources",
    srcs = glob(
        ["some/path/**/*.whatever"],
    ),
    artifactId = "a",
    visibility = ["//visibility:public"],
)
b/BUILD:
some_generator(
    name = "...",
    deps = [
        "//a:generator_resources",
    ],
)
If you want to trade memory for better performance, the operation can perhaps be parallelised more easily by Blaze if it's done in the provider instead:
def _modular_resources_impl(ctx):
    return ModularResources(
        artifactId = ctx.attr.artifactId,
        formatted_srcs = ",".join([ctx.attr.artifactId + ":" + f.path for f in ctx.files.srcs]),
    )
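A minimal sketch of the matching generator-side simplification, assuming a formatted_srcs field as above (the helper name _format_files is taken from the question):
def _format_files(deps):
    # Each dep already carries its pre-formatted string, so the generator
    # only concatenates per-dep strings instead of walking every file.
    return ",".join([dep[ModularResources].formatted_srcs for dep in deps])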

Waf Task should use variant directory

I am creating files in a Task; the example code looks as follows:
from waflib import Task, TaskGen

def build(bld):
    bld(features='write_file')

class xyz(Task.Task):
    def run(self):
        self.generator.path.get_bld().make_node(self.outputs[0].relpath())

@TaskGen.feature('write_file')
def make_tasks(self):
    for x in range(20):
        src = self.bld.path.find_node('wscript')
        tgt = src.change_ext('.' + str(x))
        tsk = self.create_task('xyz', src=src, tgt=tgt)
Now all files get placed inside the build directory, but I want them to be placed in build\abc. How do I do that? For normal builds, I can use a BuildContext and specify a variant:
from waflib.Build import BuildContext

class abc(BuildContext):
    variant = 'abc'
But I can't get the BuildContext working on that example, and setting variant on a Task.Task does not work.
Update
I updated the example based on neuro's answer; a minimal working example looks like this:
from waflib import Task, TaskGen, Configure

Configure.autoconfig = True

def configure(cnf):
    cnf.path.get_src().make_node('a/wscript').write('')

def build(bld):
    bld(features='write_file')

class xyz(Task.Task):
    def run(self):
        self.generator.path.get_bld().find_or_declare(self.outputs[0].abspath()).write('')

@TaskGen.feature('write_file')
def make_tasks(self):
    srcs = self.bld.path.ant_glob('**/wscript', excl='build')
    for src in srcs:
        build_dir_of_src = src.get_bld().parent
        my_sub_node = build_dir_of_src.make_node('xyz')
        my_sub_node.mkdir()
        tgt_basename = src.name
        tgt = my_sub_node.make_node(tgt_basename)
        tsk = self.create_task('xyz', src=src, tgt=tgt)
The problem is that this creates the following:
build\xyz\wscript
build\a\xyz\wscript
But I want this:
build\xyz\wscript
build\xyz\a\wscript
So I just want to create the folder xyz between build and whatever the tgt is, which is exactly the behavior of variant in a BuildContext.
When tasks execute, you are already in a variant build dir. To control the outputs of a task you have to use the waflib.Node class API. In your example, change_ext gets the build-directory equivalent of the source and changes the extension. To insert a subdir:
# [...]
build_dir_of_src = src.get_bld().parent
my_sub_node = build_dir_of_src.make_node("my_sub_dir")
my_sub_node.mkdir()
tgt_basename = src.change_ext('.' + str(x)).name
tgt = my_sub_node.make_node(tgt_basename)
# [...]
If you want to insert a "variant style" directory, you can use bld.bldnode (untested, but you see the point: use bld.bldnode):
def get_my_bld(bld, src_node):
    variant_like_dirname = "xyz"
    my_bld_node = bld.bldnode.make_node(variant_like_dirname)
    my_bld_node.mkdir()
    rp = src_node.get_bld().path_from(bld.bldnode)
    my_bld_target = my_bld_node.make_node(rp)
    return my_bld_target
# [...]
tgt = get_my_bld(bld, src)
# [...]
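For illustration, a sketch of how get_my_bld could be wired into the question's make_tasks feature method (untested, same caveat as above):
@TaskGen.feature('write_file')
def make_tasks(self):
    srcs = self.bld.path.ant_glob('**/wscript', excl='build')
    for src in srcs:
        # route every target through build/xyz/<relative path> instead of build/<relative path>
        tgt = get_my_bld(self.bld, src)
        self.create_task('xyz', src=src, tgt=tgt)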

Bazel - How to get all transitive sources of a target

I need to write a rule which zips up all transitive config files (*.foo) of an executable (which can be a custom rule, a java_binary, or a docker container_image).
The config files can appear in the srcs attribute of any of the executable's transitive dependencies (tars, deps, runtime_deps, etc.).
This sounds like it should be rather easy to do with an aspect attached to my rule, but I lost my way between the various examples.
sources_aspect.bzl:
SourceFiles = provider(
    fields = {
        "transitive_source_files": "list of transitive source files of a target",
    },
)

# add 'resources'? If so, _accumulate_transitive_source_files needs to check
# `if SourceFiles in dep` for each dep in deps
SourceAttr = ["tars", "deps", "runtime_deps", "exports"]

def _accumulate_transitive_source_files(accumulated, deps):
    return depset(
        transitive = [dep[SourceFiles].transitive_source_files for dep in deps] + [accumulated],
    )

def _collect_current_source_files(srcs):
    return [file for src in srcs for file in src.files.to_list()]

def _collect_source_files_aspect_impl(target, ctx):
    current_source_files = []
    if hasattr(ctx.rule.attr, "srcs"):
        current_source_files = _collect_current_source_files(ctx.rule.attr.srcs)
    if hasattr(ctx.rule.attr, "resources"):
        current_source_files = current_source_files + _collect_current_source_files(ctx.rule.attr.resources)
    accumulated_source_files = depset(current_source_files)
    for attr in SourceAttr:
        if hasattr(ctx.rule.attr, attr):
            accumulated_source_files = _accumulate_transitive_source_files(accumulated_source_files, getattr(ctx.rule.attr, attr))
    return [SourceFiles(transitive_source_files = accumulated_source_files)]

collect_source_files_aspect = aspect(
    implementation = _collect_source_files_aspect_impl,
    attr_aspects = SourceAttr,
)
sources.bzl:
load("//sources/src/main:sources_aspect.bzl", "SourceFiles", "collect_source_files_aspect")
def _owner_to_bazel_file(fileLabel):
    workspace = fileLabel.workspace_root
    package = fileLabel.package
    if 0 < len(workspace):
        workspace = workspace + "/"
    if 0 < len(package):
        package = package + "/"
    return workspace + package + "BUILD.bazel"

def _collect_source_files_rule_impl(ctx):
    metadata = [ctx.attr.group_id, ctx.attr.artifact_id]
    paths = sorted([f.path for f in ctx.attr.main_artifact_name[SourceFiles].transitive_source_files.to_list()])
    owners = sorted(depset([_owner_to_bazel_file(f.owner) for f in ctx.attr.main_artifact_name[SourceFiles].transitive_source_files.to_list()] + [_owner_to_bazel_file(ctx.label)]).to_list())
    ctx.actions.write(ctx.outputs.sources, "\n".join(metadata + paths + owners))
    ctx.actions.write(ctx.outputs.source_files, "{\"groupId\": \"%s\", \"artifactId\": \"%s\", \"sources\": %s, \"buildFiles\": %s}" % (ctx.attr.group_id, ctx.attr.artifact_id, paths, owners))
    return DefaultInfo(
        runfiles = ctx.runfiles(files = [ctx.outputs.sources, ctx.outputs.source_files]),
    )

source_files = rule(
    implementation = _collect_source_files_rule_impl,
    attrs = {
        "main_artifact_name": attr.label(aspects = [collect_source_files_aspect]),
        "group_id": attr.string(mandatory = True),
        "artifact_id": attr.string(mandatory = True),
    },
    outputs = {"sources": "%{name}.sources.txt", "source_files": "%{name}.sources.json"},
)
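For reference, a usage sketch in a BUILD.bazel file; the target name and Maven coordinates below are hypothetical, and the load path follows the package layout used above:
load("//sources/src/main:sources.bzl", "source_files")

source_files(
    name = "app_sources",
    main_artifact_name = ":app",  # the executable whose transitive sources are collected
    group_id = "com.example",
    artifact_id = "app",
)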

How to organize third-party java_library rules?

I have many BUILD files that require Jetty or other common Java libraries (SLF4J, Lucene, Guava, etc.). Each of these has a set of other JARs that it references. I would like to know the best practice for organizing these declarations and their dependencies in a large project.
For example, running generate_workspace via bazel run //src/tools/generate_workspace -- --artifact=org.eclipse.jetty:jetty-server:9.3.8.v20160314 gives me the following BUILD file:
# The following dependencies were calculated from:
# org.eclipse.jetty:jetty-server:9.3.8.v20160314
java_library(
    name = "org_eclipse_jetty_jetty_http",
    visibility = ["//visibility:public"],
    exports = [
        "@org_eclipse_jetty_jetty_http//jar",
        "@org_eclipse_jetty_jetty_util//jar",
    ],
)

java_library(
    name = "org_eclipse_jetty_jetty_util",
    visibility = ["//visibility:public"],
    exports = [
        "@org_eclipse_jetty_jetty_util//jar",
    ],
)

java_library(
    name = "javax_servlet_javax_servlet_api",
    visibility = ["//visibility:public"],
    exports = [
        "@javax_servlet_javax_servlet_api//jar",
    ],
)

java_library(
    name = "org_eclipse_jetty_jetty_server",
    visibility = ["//visibility:public"],
    exports = [
        "@org_eclipse_jetty_jetty_server//jar",
        "@javax_servlet_javax_servlet_api//jar",
        "@org_eclipse_jetty_jetty_http//jar",
        "@org_eclipse_jetty_jetty_io//jar",
        "@org_eclipse_jetty_jetty_util//jar",
    ],
)

java_library(
    name = "org_eclipse_jetty_jetty_io",
    visibility = ["//visibility:public"],
    exports = [
        "@org_eclipse_jetty_jetty_io//jar",
        "@org_eclipse_jetty_jetty_util//jar",
    ],
)
and the following WORKSPACE file:
# The following dependencies were calculated from:
# org.eclipse.jetty:jetty-server:9.3.8.v20160314
# org.eclipse.jetty:jetty-server:jar:9.3.8.v20160314
maven_jar(
    name = "org_eclipse_jetty_jetty_http",
    artifact = "org.eclipse.jetty:jetty-http:9.3.8.v20160314",
)

# org.eclipse.jetty:jetty-http:jar:9.3.8.v20160314
# org.eclipse.jetty:jetty-io:jar:9.3.8.v20160314
maven_jar(
    name = "org_eclipse_jetty_jetty_util",
    artifact = "org.eclipse.jetty:jetty-util:9.3.8.v20160314",
)

# org.eclipse.jetty:jetty-server:jar:9.3.8.v20160314
maven_jar(
    name = "javax_servlet_javax_servlet_api",
    artifact = "javax.servlet:javax.servlet-api:3.1.0",
)

maven_jar(
    name = "org_eclipse_jetty_jetty_server",
    artifact = "org.eclipse.jetty:jetty-server:9.3.8.v20160314",
)

# org.eclipse.jetty:jetty-server:jar:9.3.8.v20160314
maven_jar(
    name = "org_eclipse_jetty_jetty_io",
    artifact = "org.eclipse.jetty:jetty-io:9.3.8.v20160314",
)
I have a dependency on jetty-server and jetty-util in many projects. Is there a better practice than repeating this information in each BUILD file?
Generally you'd put the generate_workspace-generated BUILD file in the root of your workspace (next to your WORKSPACE file) and then, in other BUILD files, reference whatever target they need to depend on. For example, in src/main/java/com/your-project/subcomponent/BUILD, you might say:
java_library(
    name = "my-servlet",
    srcs = glob(["*.java"]),
    deps = [
        "//:javax_servlet_javax_servlet_api",
        # other deps...
    ],
)

How to use a static library created by a custom task?

I want to use waf to trigger a makefile that builds another library. For this I created the following task:
import os

def build(bld):
    def run(self):
        bld_dir = self.generator.bld.path.get_bld()
        src_dir = self.inputs[0].parent
        tgt = self.outputs[0]
        tgt_dir = bld_dir.make_node(os.path.splitext(tgt.name)[0])
        cmd = 'BUILDDIR="{tgt_dir}" make config gdb=1 debug=1 cc={cc} && BUILDDIR="{tgt_dir}" make'.format(
            tgt_dir = tgt_dir.abspath(),
            cc = self.env.get_flat("CC"))
        self.exec_command(cmd, cwd=src_dir.abspath())
        # 'lib' (the library node produced by make) is elided in the question
        return self.exec_command(['cp', lib.abspath(), tgt.abspath()],
                                 cwd=tgt_dir.abspath())

    bld(
        rule = run,
        source = "Makefile",
        target = 'metis',
    )
How can I tell waf that the task created a static library, so that I can use "metis" in a use keyword:
bld(
    features = "cxx cxxprogram",
    source = "main.cpp",
    target = 'main',
    use = 'metis',
)
To finally solve the problem I created my own link_task that basically does nothing (similar to the fake_lib in ccroot.py):
from waflib.TaskGen import feature, after_method
from waflib.Tools.ccroot import stlink_task

class custom_stlib(stlink_task):
    """ Dummy link task """
    pass

@feature("custom_stlib")
def custom_lib(self):
    self.env['custom_stlib_PATTERN'] = 'lib%s.a'
    self.link_task = self.create_task('custom_stlib', [])
    self.link_task.add_target(self.target)

def build(bld):
    # ...
    bld(
        features = "cxx custom_stlib",
        target = 'metis',
        after = "metis_bld",
    )
