feat(third_party/bazel): Check in rules_haskell from Tweag

Vincent Ambo 2019-07-04 11:18:12 +01:00
parent 2eb1dc26e4
commit f723b8b878
479 changed files with 51484 additions and 0 deletions


@@ -0,0 +1,563 @@
"""Actions for compiling Haskell source code"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"declare_compiled",
"module_name",
"target_unique_name",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/version_macros.bzl", "version_macro_includes")
load(
":providers.bzl",
"GhcPluginInfo",
"get_libs_for_ghc_linker",
"merge_HaskellCcInfo",
)
load(":private/set.bzl", "set")
def _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, hsc_file):
"""Process a single hsc file.
Args:
hs: Haskell context.
cc: CcInteropInfo, information about C dependencies.
hsc_flags: extra flags to pass to hsc2hs
hsc_inputs: extra file inputs for the hsc2hs command
hsc_file: hsc file to process.
Returns:
(File, string): Haskell source file created by processing hsc_file and
new import directory containing the produced file.
"""
args = hs.actions.args()
# Output a Haskell source file.
hsc_dir_raw = paths.join("_hsc", hs.name)
hs_out = declare_compiled(hs, hsc_file, ".hs", directory = hsc_dir_raw)
args.add_all([hsc_file.path, "-o", hs_out.path])
args.add_all(["-c", cc.tools.cc])
args.add_all(["-l", cc.tools.cc])
args.add("-ighcplatform.h")
args.add("-ighcversion.h")
args.add_all(["--cflag=" + f for f in cc.cpp_flags])
args.add_all(["--cflag=" + f for f in cc.compiler_flags])
args.add_all(["--cflag=" + f for f in cc.include_args])
args.add_all(["--lflag=" + f for f in cc.linker_flags])
args.add_all(hsc_flags)
# Add an empty PATH variable if not already specified in hs.env.
# Needed to avoid a "Couldn't read PATH" error on Windows.
#
# On Unix platforms, though, we mustn't set PATH as it is automatically set up
# by the run action, unless already set in the env parameter. This triggers
# build errors when using GHC bindists on Linux.
if hs.env.get("PATH") == None and hs.toolchain.is_windows:
hs.env["PATH"] = ""
hs.actions.run(
inputs = depset(transitive = [
depset(cc.hdrs),
depset([hsc_file]),
depset(cc.files),
depset(hsc_inputs),
]),
outputs = [hs_out],
mnemonic = "HaskellHsc2hs",
executable = hs.tools.hsc2hs,
arguments = [args],
env = hs.env,
)
idir = paths.join(
hs.bin_dir.path,
hs.label.package,
hsc_dir_raw,
)
return hs_out, idir
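# For orientation, a minimal sketch of the hsc2hs command line built above
# (paths and flags are hypothetical):
#
#   hsc2hs foo/bar.hsc -o bazel-out/.../_hsc/mylib/foo/bar.hs \
#     -c <cc> -l <cc> -ighcplatform.h -ighcversion.h \
#     --cflag=-DSOME_MACRO --lflag=-L/some/lib/dir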
def _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id, version, plugins):
"""Compute variables common to all compilation targets (binary and library).
Returns:
struct with the following fields:
args: default argument list
compile_flags: arguments that were used to compile the package
inputs: default inputs
input_manifests: input manifests
outputs: default outputs
objects_dir: object files directory
interfaces_dir: interface files directory
source_files: set of files that contain Haskell modules
extra_source_files: depset of non-Haskell source files
import_dirs: c2hs import hierarchy roots
env: default environment variables
"""
compile_flags = []
# GHC expects the CC compiler as the assembler, but segregates the
# set of flags to pass to it when used as an assembler. So we have
# to set both -optc and -opta.
cc_args = [
"-optc" + f
for f in cc.compiler_flags
] + [
"-opta" + f
for f in cc.compiler_flags
]
compile_flags += cc_args
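# For example, assuming cc.compiler_flags is ["-Wall", "-fno-common"],
# cc_args comes out as:
#   ["-optc-Wall", "-optc-fno-common", "-opta-Wall", "-opta-fno-common"]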
interface_dir_raw = "_iface_prof" if with_profiling else "_iface"
object_dir_raw = "_obj_prof" if with_profiling else "_obj"
# Declare file directories.
#
# NOTE: We could have used -outputdir here and a single output
# directory. But keeping interface and object files separate has
# one advantage: if interface files are invariant under
# a particular code change, then we don't need to rebuild
# downstream.
if my_pkg_id:
# If we're compiling a package, put the interfaces inside the
# package directory.
interfaces_dir = hs.actions.declare_directory(
paths.join(
pkg_id.to_string(my_pkg_id),
interface_dir_raw,
),
)
else:
interfaces_dir = hs.actions.declare_directory(
paths.join(interface_dir_raw, hs.name),
)
objects_dir = hs.actions.declare_directory(
paths.join(object_dir_raw, hs.name),
)
# Default compiler flags.
compile_flags += hs.toolchain.compiler_flags
compile_flags += user_compile_flags
# Work around macOS linker limits. This fix has landed in GHC HEAD, but is
# not yet in a release; plus, we still want to support older versions of
# GHC. For details, see: https://phabricator.haskell.org/D4714
if hs.toolchain.is_darwin:
compile_flags += ["-optl-Wl,-dead_strip_dylibs"]
compile_flags.extend(
pkg_info_to_compile_flags(
expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = my_pkg_id,
custom_package_databases = None,
version = version,
),
),
)
compile_flags.extend(
pkg_info_to_compile_flags(
expose_packages(
plugin_dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = my_pkg_id,
custom_package_databases = None,
version = version,
),
for_plugin = True,
),
)
header_files = []
boot_files = []
source_files = set.empty()
# Forward all "-D" and "-optP-D" flags to hsc2hs
hsc_flags = []
hsc_flags += ["--cflag=" + x for x in user_compile_flags if x.startswith("-D")]
hsc_flags += ["--cflag=" + x[len("-optP"):] for x in user_compile_flags if x.startswith("-optP-D")]
hsc_inputs = []
if version:
(version_macro_headers, version_macro_flags) = version_macro_includes(dep_info)
hsc_flags += ["--cflag=" + x for x in version_macro_flags]
hsc_inputs += set.to_list(version_macro_headers)
# Add import hierarchy root.
# Note that this is not perfect, since GHC requires hs-boot files
# to be in the same directory as the corresponding .hs file. Thus
# the two must both have the same root; i.e., both plain files,
# both in bin_dir, or both in genfiles_dir.
import_dirs = set.from_list([
hs.src_root,
paths.join(hs.bin_dir.path, hs.src_root),
paths.join(hs.genfiles_dir.path, hs.src_root),
])
for s in srcs:
if s.extension == "h":
header_files.append(s)
elif s.extension == "hsc":
s0, idir = _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, s)
set.mutable_insert(source_files, s0)
set.mutable_insert(import_dirs, idir)
elif s.extension in ["hs-boot", "lhs-boot"]:
boot_files.append(s)
else:
set.mutable_insert(source_files, s)
if s in import_dir_map:
idir = import_dir_map[s]
set.mutable_insert(import_dirs, idir)
compile_flags += ["-i{0}".format(d) for d in set.to_list(import_dirs)]
# Write the -optP flags to a parameter file because they can be very long on Windows
# e.g. 27KB for grpc-haskell
# Equivalent to: compile_flags += ["-optP" + f for f in cc.cpp_flags]
optp_args_file = hs.actions.declare_file("optp_args_%s" % hs.name)
optp_args = hs.actions.args()
optp_args.add_all(cc.cpp_flags)
optp_args.set_param_file_format("multiline")
hs.actions.write(optp_args_file, optp_args)
compile_flags += ["-optP@" + optp_args_file.path]
compile_flags += cc.include_args
locale_archive_depset = (
depset([hs.toolchain.locale_archive]) if hs.toolchain.locale_archive != None else depset()
)
# This is absolutely required; otherwise GHC doesn't know what package
# it's creating `Name`s for when it writes them to Haddock interface
# files, and Haddock is then unable to resolve those names for linking
# after reading the interface file back later.
if my_pkg_id != None:
unit_id_args = [
"-this-unit-id",
pkg_id.to_string(my_pkg_id),
"-optP-DCURRENT_PACKAGE_KEY=\"{}\"".format(pkg_id.to_string(my_pkg_id)),
]
compile_flags += unit_id_args
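# Sketch with a hypothetical package id "mylib-0.1.0"; the flags added
# here are:
#   ["-this-unit-id", "mylib-0.1.0",
#    "-optP-DCURRENT_PACKAGE_KEY=\"mylib-0.1.0\""]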
args = hs.actions.args()
# Compilation mode. Allow rule-supplied compiler flags to override it.
if hs.mode == "opt":
args.add("-O2")
args.add("-static")
if with_profiling:
args.add("-prof", "-fexternal-interpreter")
# Common flags
args.add_all([
"-v0",
"-no-link",
"-fPIC",
"-hide-all-packages",
# Should never trigger in sandboxed builds, but can be useful
# to debug issues in non-sandboxed builds.
"-Wmissing-home-modules",
])
# Output directories
args.add_all([
"-odir",
objects_dir.path,
"-hidir",
interfaces_dir.path,
])
# Interface files with profiling have to have the extension "p_hi":
# https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/packages.html#installedpackageinfo-a-package-specification
# otherwise we won't be able to register them with ghc-pkg.
if with_profiling:
args.add_all([
"-hisuf",
"p_hi",
"-osuf",
"p_o",
])
args.add_all(compile_flags)
# Plugins
for plugin in plugins:
args.add("-fplugin={}".format(plugin[GhcPluginInfo].module))
for opt in plugin[GhcPluginInfo].args:
args.add_all(["-fplugin-opt", "{}:{}".format(plugin[GhcPluginInfo].module, opt)])
plugin_tool_inputs = [plugin[GhcPluginInfo].tool_inputs for plugin in plugins]
plugin_tool_input_manifests = [
manifest
for plugin in plugins
for manifest in plugin[GhcPluginInfo].tool_input_manifests
]
# Pass source files
for f in set.to_list(source_files):
args.add(f)
extra_source_files = depset(
transitive = [extra_srcs, depset(header_files), depset(boot_files)],
)
# Transitive library dependencies for runtime.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
merge_HaskellCcInfo(
dep_info.transitive_cc_dependencies,
plugin_dep_info.transitive_cc_dependencies,
),
)
return struct(
args = args,
compile_flags = compile_flags,
inputs = depset(transitive = [
depset(header_files),
depset(boot_files),
set.to_depset(source_files),
extra_source_files,
depset(cc.hdrs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.interface_dirs),
depset(dep_info.static_libraries),
depset(dep_info.static_libraries_prof),
set.to_depset(dep_info.dynamic_libraries),
set.to_depset(plugin_dep_info.package_databases),
set.to_depset(plugin_dep_info.interface_dirs),
depset(plugin_dep_info.static_libraries),
depset(plugin_dep_info.static_libraries_prof),
set.to_depset(plugin_dep_info.dynamic_libraries),
depset(library_deps),
depset(ld_library_deps),
java.inputs,
locale_archive_depset,
depset(transitive = plugin_tool_inputs),
depset([optp_args_file]),
]),
input_manifests = plugin_tool_input_manifests,
objects_dir = objects_dir,
interfaces_dir = interfaces_dir,
outputs = [objects_dir, interfaces_dir],
source_files = source_files,
extra_source_files = depset(transitive = [extra_source_files, depset([optp_args_file])]),
import_dirs = import_dirs,
env = dicts.add(
ghc_env,
java.env,
hs.env,
),
)
def _hpc_compiler_args(hs):
hpcdir = "{}/{}/.hpc".format(hs.bin_dir.path, hs.package_root)
return ["-fhpc", "-hpcdir", hpcdir]
def _coverage_datum(mix_file, src_file, target_label):
return struct(
mix_file = mix_file,
src_file = src_file,
target_label = target_label,
)
def compile_binary(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs,
ls_modules,
import_dir_map,
extra_srcs,
user_compile_flags,
dynamic,
with_profiling,
main_function,
version,
inspect_coverage = False,
plugins = []):
"""Compile a Haskell target into object files suitable for linking.
Returns:
struct with the following fields:
object_files: list of static object files
object_dyn_files: list of dynamic object files
modules: set of module names
source_files: set of Haskell source files
"""
c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = None, version = version, plugins = plugins)
c.args.add_all(["-main-is", main_function])
if dynamic:
# For binaries, GHC creates .o files even for code to be
# linked dynamically. So we have to force the object suffix to
# be consistent with the dynamic object suffix in the library
# case.
c.args.add_all(["-dynamic", "-osuf dyn_o"])
coverage_data = []
if inspect_coverage:
c.args.add_all(_hpc_compiler_args(hs))
for src_file in srcs:
module = module_name(hs, src_file)
mix_file = hs.actions.declare_file(".hpc/{module}.mix".format(module = module))
coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = c.inputs,
input_manifests = c.input_manifests,
outputs = c.outputs + [datum.mix_file for datum in coverage_data],
mnemonic = "HaskellBuildBinary" + ("Prof" if with_profiling else ""),
progress_message = "HaskellBuildBinary {}".format(hs.label),
env = c.env,
arguments = c.args,
)
if with_profiling:
exposed_modules_file = None
else:
exposed_modules_file = hs.actions.declare_file(
target_unique_name(hs, "exposed-modules"),
)
hs.actions.run(
inputs = [c.interfaces_dir, hs.toolchain.global_pkg_db],
outputs = [exposed_modules_file],
executable = ls_modules,
arguments = [
c.interfaces_dir.path,
hs.toolchain.global_pkg_db.path,
"/dev/null", # no hidden modules
"/dev/null", # no reexported modules
exposed_modules_file.path,
],
use_default_shell_env = True,
)
return struct(
objects_dir = c.objects_dir,
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
compile_flags = c.compile_flags,
exposed_modules_file = exposed_modules_file,
coverage_data = coverage_data,
)
def compile_library(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs,
ls_modules,
other_modules,
exposed_modules_reexports,
import_dir_map,
extra_srcs,
user_compile_flags,
with_shared,
with_profiling,
my_pkg_id,
plugins = []):
"""Build arguments for Haskell package build.
Returns:
struct with the following fields:
interfaces_dir: directory containing interface files
interface_files: list of interface files
object_files: list of static object files
object_dyn_files: list of dynamic object files
compile_flags: list of string arguments suitable for Haddock
modules: set of module names
source_files: set of Haskell module files
import_dirs: import directories that should make all modules visible (for GHCi)
"""
c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = my_pkg_id, version = my_pkg_id.version, plugins = plugins)
if with_shared:
c.args.add("-dynamic-too")
coverage_data = []
if hs.coverage_enabled:
c.args.add_all(_hpc_compiler_args(hs))
for src_file in srcs:
pkg_id_string = pkg_id.to_string(my_pkg_id)
module = module_name(hs, src_file)
mix_file = hs.actions.declare_file(".hpc/{pkg}/{module}.mix".format(pkg = pkg_id_string, module = module))
coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = c.inputs,
input_manifests = c.input_manifests,
outputs = c.outputs + [datum.mix_file for datum in coverage_data],
mnemonic = "HaskellBuildLibrary" + ("Prof" if with_profiling else ""),
progress_message = "HaskellBuildLibrary {}".format(hs.label),
env = c.env,
arguments = c.args,
)
if with_profiling:
exposed_modules_file = None
else:
hidden_modules_file = hs.actions.declare_file(
target_unique_name(hs, "hidden-modules"),
)
hs.actions.write(
output = hidden_modules_file,
content = ", ".join(other_modules),
)
reexported_modules_file = hs.actions.declare_file(
target_unique_name(hs, "reexported-modules"),
)
hs.actions.write(
output = reexported_modules_file,
content = ", ".join(exposed_modules_reexports),
)
exposed_modules_file = hs.actions.declare_file(
target_unique_name(hs, "exposed-modules"),
)
hs.actions.run(
inputs = [
c.interfaces_dir,
hs.toolchain.global_pkg_db,
hidden_modules_file,
reexported_modules_file,
],
outputs = [exposed_modules_file],
executable = ls_modules,
arguments = [
c.interfaces_dir.path,
hs.toolchain.global_pkg_db.path,
hidden_modules_file.path,
reexported_modules_file.path,
exposed_modules_file.path,
],
use_default_shell_env = True,
)
return struct(
interfaces_dir = c.interfaces_dir,
objects_dir = c.objects_dir,
compile_flags = c.compile_flags,
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
exposed_modules_file = exposed_modules_file,
coverage_data = coverage_data,
)


@@ -0,0 +1,667 @@
"""Actions for linking object code produced by compilation"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"is_static_library",
"ln",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/list.bzl", "list")
# tests in /tests/unit_tests/BUILD
def parent_dir_path(path):
"""Returns the path of the parent directory.
For a relative path with just a file, "." is returned.
The path is not normalized.
foo => .
foo/ => foo
foo/bar => foo
foo/bar/baz => foo/bar
foo/../bar => foo/..
Args:
path: a path string
Returns:
A path list of the form `["foo", "bar"]`
"""
path_dir = paths.dirname(path)
# dirname returns "" if there is no parent directory
# In that case we return the identity path, which is ".".
if path_dir == "":
return ["."]
else:
return path_dir.split("/")
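# A few concrete evaluations, mirroring the docstring examples:
#   parent_dir_path("foo")         => ["."]
#   parent_dir_path("foo/bar")     => ["foo"]
#   parent_dir_path("foo/bar/baz") => ["foo", "bar"]
#   parent_dir_path("foo/../bar")  => ["foo", ".."]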
def __check_dots(target, path):
# there's still a (non-leading) ".." in the split path
if ".." in path:
fail("the short_path of target {} (which is {}) contains more dots than loading `../`. We cant handle that.".format(
target,
target.short_path,
))
# Skylark doesn't allow nested defs, which is a mystery.
def _get_target_parent_dir(target):
"""get the parent dir and handle leading short_path dots,
which signify that the target is in an external repository.
Args:
target: a target, .short_path is used
Returns:
(is_external, parent_dir)
`is_external`: Bool whether the path points to an external repository
`parent_dir`: The parent directory, either up to the runfiles toplevel
or up to the external repository toplevel.
Is `[]` if there is no parent dir.
"""
parent_dir = parent_dir_path(target.short_path)
if parent_dir[0] == "..":
__check_dots(target, parent_dir[1:])
return (True, parent_dir[1:])
elif parent_dir[0] == ".":
return (False, [])
else:
__check_dots(target, parent_dir)
return (False, parent_dir)
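# Illustration with hypothetical short_paths:
#   "foo/bar/a.so"       => (False, ["foo", "bar"])    # internal target
#   "../myrepo/bar/a.so" => (True, ["myrepo", "bar"])  # external repository
#   "a.so"               => (False, [])                # no parent dir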
# tests in /tests/unit_tests/BUILD
def create_rpath_entry(
binary,
dependency,
keep_filename,
prefix = ""):
"""Return a (relative) path that points from `binary` to `dependecy`
while not leaving the current bazel runpath, taking into account weird
corner cases of `.short_path` concerning external repositories.
The resulting entry should be able to be inserted into rpath or similar.
Examples:
bin.short_path=foo/a.so and dep.short_path=bar/b.so
=> create_rpath_entry(bin, dep, False) = ../bar
and
create_rpath_entry(bin, dep, True) = ../bar/b.so
and
create_rpath_entry(bin, dep, True, "$ORIGIN") = $ORIGIN/../bar/b.so
Args:
binary: target of current binary
dependency: target of dependency to relatively point to
keep_filename: whether to point to the filename or its parent dir
prefix: string path prefix to add before the relative path
Returns:
relative path string
"""
(bin_is_external, bin_parent_dir) = _get_target_parent_dir(binary)
(dep_is_external, dep_parent_dir) = _get_target_parent_dir(dependency)
# backup through parent directories of the binary,
# to the runfiles directory
bin_backup = [".."] * len(bin_parent_dir)
# external repositories live in `target.runfiles/external`,
# while the internal repository lives in `target.runfiles`.
# The `.short_path`s of external repositories are strange,
# they start with `../`, but you cannot just append that in
# order to find the correct runpath. Instead you have to use
# the following logic to construct the correct runpaths:
if bin_is_external:
if dep_is_external:
# stay in `external`
path_segments = bin_backup
else:
# backup out of `external`
path_segments = [".."] + bin_backup
elif dep_is_external:
# go into `external`
path_segments = bin_backup + ["external"]
else:
# no special external traversal
path_segments = bin_backup
# then add the parent dir to our dependency
path_segments.extend(dep_parent_dir)
# optionally add the filename
if keep_filename:
path_segments.append(
paths.basename(dependency.short_path),
)
# normalize for good measure and create the final path
path = paths.normalize("/".join(path_segments))
# and add the prefix if applicable
if prefix == "":
return path
else:
return prefix + "/" + path
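# Worked example for the external case (hypothetical paths): with
# bin.short_path = "../binrepo/a.so" (external, parent dir ["binrepo"])
# and dep.short_path = "bar/b.so" (internal), bin_backup is [".."], one
# extra ".." backs out of `external`, and the entry becomes "../../bar",
# or "$ORIGIN/../../bar" with prefix = "$ORIGIN".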
def _merge_parameter_files(hs, file1, file2):
"""Merge two GHC parameter files into one.
Args:
hs: Haskell context.
file1: The first parameter file.
file2: The second parameter file.
Returns:
File: A new parameter file containing the parameters of both input files.
The file name is based on the file names of the input files. The file
is located next to the first input file.
"""
params_file = hs.actions.declare_file(
file1.basename + ".and." + file2.basename,
sibling = file1,
)
hs.actions.run_shell(
inputs = [file1, file2],
outputs = [params_file],
command = """
cat {file1} {file2} > {out}
""".format(
file1 = file1.path,
file2 = file2.path,
out = params_file.path,
),
)
return params_file
def _darwin_create_extra_linker_flags_file(hs, cc, objects_dir, executable, dynamic, solibs):
"""Write additional linker flags required on MacOS to a parameter file.
Args:
hs: Haskell context.
cc: CcInteropInfo, information about C dependencies.
objects_dir: Directory storing object files.
Used to determine output file location.
executable: The executable being built.
dynamic: Bool: Whether to link dynamically or statically.
solibs: List of dynamic library dependencies.
Returns:
File: Parameter file with additional linker flags. To be passed to GHC.
"""
# On Darwin GHC will pass the dead_strip_dylibs flag to the linker. This
# flag will remove any shared library loads from the binary's header that
# are not directly resolving undefined symbols in the binary. I.e. any
# indirect shared library dependencies will be removed. This conflicts with
# Bazel's builtin cc rules, which assume that the final binary will load
# all transitive shared library dependencies. In particular shared libraries
# produced by Bazel's cc rules never load shared libraries themselves. This
# causes missing symbols at runtime on MacOS, see #170.
#
# The following work-around applies the `-u` flag to the linker for any
# symbol that is undefined in any transitive shared library dependency.
# This forces the linker to resolve these undefined symbols in all
# transitive shared library dependencies and keep the corresponding load
# commands in the binary's header.
#
# Unfortunately, this prohibits elimination of any truly redundant shared
# library dependencies. Furthermore, the transitive closure of shared
# library dependencies can be large, so this makes it more likely to exceed
# the Mach-O header size limit on MacOS.
#
# This is a horrendous hack, but it seems to be forced on us by how Bazel
# builds dynamic cc libraries.
suffix = ".dynamic.linker_flags" if dynamic else ".static.linker_flags"
linker_flags_file = hs.actions.declare_file(
executable.basename + suffix,
sibling = objects_dir,
)
hs.actions.run_shell(
inputs = solibs,
outputs = [linker_flags_file],
command = """
touch {out}
for lib in {solibs}; do
{nm} -u "$lib" | sed 's/^/-optl-Wl,-u,/' >> {out}
done
""".format(
nm = cc.tools.nm,
solibs = " ".join(["\"" + l.path + "\"" for l in solibs]),
out = linker_flags_file.path,
),
)
return linker_flags_file
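# The generated parameter file is a plain list of `-u` linker flags, one
# per undefined symbol reported by nm, e.g. (hypothetical symbols):
#   -optl-Wl,-u,_hs_init
#   -optl-Wl,-u,_some_other_symbol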
def _create_objects_dir_manifest(hs, objects_dir, dynamic, with_profiling):
suffix = ".dynamic.manifest" if dynamic else ".static.manifest"
objects_dir_manifest = hs.actions.declare_file(
objects_dir.basename + suffix,
sibling = objects_dir,
)
if with_profiling:
ext = "p_o"
elif dynamic:
ext = "dyn_o"
else:
ext = "o"
hs.actions.run_shell(
inputs = [objects_dir],
outputs = [objects_dir_manifest],
command = """
find {dir} -name '*.{ext}' > {out}
""".format(
dir = objects_dir.path,
ext = ext,
out = objects_dir_manifest.path,
),
use_default_shell_env = True,
)
return objects_dir_manifest
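# Sketch of a resulting manifest (static, non-profiling case; paths are
# hypothetical), one object file per line as emitted by `find`:
#   bazel-out/.../_obj/mylib/Foo.o
#   bazel-out/.../_obj/mylib/Bar.o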
def _link_dependencies(hs, dep_info, dynamic, binary, args):
"""Configure linker flags and inputs.
Configure linker flags for C library dependencies and runtime dynamic
library dependencies. And collect the C libraries to pass as inputs to
the linking action.
Args:
hs: Haskell context.
dep_info: HaskellInfo provider.
dynamic: Bool: Whether to link dynamically, or statically.
binary: Final linked binary.
args: Arguments to the linking action.
Returns:
depset: C library dependencies to provide as input to the linking action.
"""
# Pick linking context based on linking mode.
if dynamic:
link_ctx = dep_info.cc_dependencies.dynamic_linking
trans_link_ctx = dep_info.transitive_cc_dependencies.dynamic_linking
else:
link_ctx = dep_info.cc_dependencies.static_linking
trans_link_ctx = dep_info.transitive_cc_dependencies.static_linking
# Direct C library dependencies to link.
# I.e. not indirect through another Haskell dependency.
# Such indirect dependencies are linked by GHC based on the extra-libraries
# fields in the dependency's package configuration file.
libs_to_link = link_ctx.libraries_to_link.to_list()
_add_external_libraries(args, libs_to_link)
# Transitive library dependencies to have in scope for linking.
trans_libs_to_link = trans_link_ctx.libraries_to_link.to_list()
# Libraries to pass as inputs to linking action.
cc_link_libs = depset(transitive = [
depset(trans_libs_to_link),
])
# Transitive dynamic library dependencies to have in RUNPATH.
cc_solibs = trans_link_ctx.dynamic_libraries_for_runtime.to_list()
# Collect Haskell dynamic library dependencies in common RUNPATH.
# This is to keep the number of RUNPATH entries low, for faster loading
# and to avoid exceeding the Mach-O header size limit on MacOS.
hs_solibs = []
if dynamic:
hs_solibs_prefix = "_hssolib_%s" % hs.name
for dep in set.to_list(dep_info.dynamic_libraries):
dep_link = hs.actions.declare_file(
paths.join(hs_solibs_prefix, dep.basename),
sibling = binary,
)
ln(hs, dep, dep_link)
hs_solibs.append(dep_link)
# Configure RUNPATH.
rpaths = _infer_rpaths(
hs.toolchain.is_darwin,
binary,
trans_link_ctx.dynamic_libraries_for_runtime.to_list() +
hs_solibs,
)
for rpath in set.to_list(rpaths):
args.add("-optl-Wl,-rpath," + rpath)
return (cc_link_libs, cc_solibs, hs_solibs)
def link_binary(
hs,
cc,
dep_info,
extra_srcs,
compiler_flags,
objects_dir,
dynamic,
with_profiling,
version):
"""Link Haskell binary from static object files.
Returns:
File: produced executable
"""
exe_name = hs.name + (".exe" if hs.toolchain.is_windows else "")
executable = hs.actions.declare_file(exe_name)
args = hs.actions.args()
args.add_all(["-optl" + f for f in cc.linker_flags])
if with_profiling:
args.add("-prof")
args.add_all(hs.toolchain.compiler_flags)
args.add_all(compiler_flags)
# By default, GHC will produce mostly-static binaries, i.e. in which all
# Haskell code is statically linked and foreign libraries and system
# dependencies are dynamically linked. If linkstatic is false, i.e. the user
# has requested fully dynamic linking, we must therefore add flags to make
# sure that GHC dynamically links Haskell code too. The one exception to
# this is when we are compiling for profiling, which currently does not play
# nicely with dynamic linking.
if dynamic:
if with_profiling:
print("WARNING: dynamic linking and profiling don't mix. Omitting -dynamic.\nSee https://ghc.haskell.org/trac/ghc/ticket/15394")
else:
args.add_all(["-pie", "-dynamic"])
# When compiling with `-threaded`, GHC needs to link against
# the pthread library when linking against static archives (.a).
# We assume it's not a problem to pass it for other cases,
# so we just default to passing it.
args.add("-optl-pthread")
args.add_all(["-o", executable.path])
# De-duplicate optl calls while preserving ordering: we want the last
# invocation of an object to remain last. That is, `-optl foo -optl
# bar -optl foo` becomes `-optl bar -optl foo`. Do this by counting
# the number of occurrences. That way we only build a dict and add to
# args directly rather than doing multiple reversals with temporary
# lists.
args.add_all(pkg_info_to_compile_flags(expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = None,
custom_package_databases = None,
version = version,
)))
(cc_link_libs, cc_solibs, hs_solibs) = _link_dependencies(
hs = hs,
dep_info = dep_info,
dynamic = dynamic,
binary = executable,
args = args,
)
# XXX: Suppress a warning that Clang prints due to GHC automatically passing
# "-pie" or "-no-pie" to the C compiler.
# This is linked to https://ghc.haskell.org/trac/ghc/ticket/15319
args.add_all([
"-optc-Wno-unused-command-line-argument",
"-optl-Wno-unused-command-line-argument",
])
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = dynamic,
with_profiling = with_profiling,
)
extra_linker_flags_file = None
if hs.toolchain.is_darwin:
args.add("-optl-Wl,-headerpad_max_install_names")
# Nixpkgs commit 3513034208a introduces -liconv in NIX_LDFLAGS on
# Darwin. We don't currently handle NIX_LDFLAGS in any special way,
# so as a hack we simply always do what NIX_LDFLAGS tells us to do
# when using a toolchain from Nixpkgs.
# TODO remove this gross hack.
args.add("-liconv")
extra_linker_flags_file = _darwin_create_extra_linker_flags_file(
hs,
cc,
objects_dir,
executable,
dynamic,
cc_solibs,
)
if extra_linker_flags_file != None:
params_file = _merge_parameter_files(hs, objects_dir_manifest, extra_linker_flags_file)
else:
params_file = objects_dir_manifest
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = depset(transitive = [
depset(extra_srcs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.dynamic_libraries),
depset(dep_info.static_libraries),
depset(dep_info.static_libraries_prof),
depset([objects_dir]),
cc_link_libs,
]),
outputs = [executable],
mnemonic = "HaskellLinkBinary",
arguments = args,
params_file = params_file,
)
return (executable, cc_solibs + hs_solibs)
def _add_external_libraries(args, ext_libs):
"""Add options to `args` that allow us to link to `ext_libs`.
Args:
args: Args object.
ext_libs: C library dependencies.
"""
# Deduplicate the list of ext_libs based on their
# library name (file name stripped of lib prefix and endings).
# This keeps the command lines short, e.g. when a C library
# like `liblz4.so` appears in multiple dependencies.
# XXX: this is only done here.
# Shouldn't the deduplication be applied to *all* external libraries?
deduped = list.dedup_on(get_lib_name, ext_libs)
for lib in deduped:
args.add_all([
"-L{0}".format(
paths.dirname(lib.path),
),
"-l{0}".format(
# technically this is the second call to get_lib_name,
# but the added clarity makes up for it.
get_lib_name(lib),
),
])
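# Example from the comment above: if `liblz4.so` appears in several
# dependencies, only one copy survives deduplication, contributing
#   ["-L<dirname of liblz4.so>", "-llz4"]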
def _infer_rpaths(is_darwin, target, solibs):
"""Return set of RPATH values to be added to target so it can find all
solibs
The resulting paths look like:
$ORIGIN/../../path/to/solib/dir
This means: "go upwards to your runfiles directory, then descend into
the parent folder of the solib".
Args:
is_darwin: Whether we're compiling on and for Darwin.
target: File, executable or library we're linking.
solibs: A list of Files, shared objects that the target needs.
Returns:
Set of strings: rpaths to add to target.
"""
r = set.empty()
if is_darwin:
prefix = "@loader_path"
else:
prefix = "$ORIGIN"
for solib in solibs:
rpath = create_rpath_entry(
binary = target,
dependency = solib,
keep_filename = False,
prefix = prefix,
)
set.mutable_insert(r, rpath)
return r
def _so_extension(hs):
"""Returns the extension for shared libraries.
Args:
hs: Haskell rule context.
Returns:
string of extension.
"""
return "dylib" if hs.toolchain.is_darwin else "so"
def link_library_static(hs, cc, dep_info, objects_dir, my_pkg_id, with_profiling):
"""Link a static library for the package using given object files.
Returns:
File: Produced static library.
"""
static_library = hs.actions.declare_file(
"lib{0}.a".format(pkg_id.library_name(hs, my_pkg_id, prof_suffix = with_profiling)),
)
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = False,
with_profiling = with_profiling,
)
args = hs.actions.args()
inputs = [objects_dir, objects_dir_manifest] + cc.files
if hs.toolchain.is_darwin:
# On Darwin, ar doesn't support params files.
args.add_all([
static_library,
objects_dir_manifest.path,
])
# TODO Get ar location from the CC toolchain. This is
# complicated by the fact that the CC toolchain does not
# always use ar, and libtool has an entirely different CLI.
# See https://github.com/bazelbuild/bazel/issues/5127
hs.actions.run_shell(
inputs = inputs,
outputs = [static_library],
mnemonic = "HaskellLinkStaticLibrary",
command = "{ar} qc $1 $(< $2)".format(ar = cc.tools.ar),
arguments = [args],
# Use the default macosx toolchain
env = {"SDKROOT": "macosx"},
)
else:
args.add_all([
"qc",
static_library,
"@" + objects_dir_manifest.path,
])
hs.actions.run(
inputs = inputs,
outputs = [static_library],
mnemonic = "HaskellLinkStaticLibrary",
executable = cc.tools.ar,
arguments = [args],
)
return static_library
def link_library_dynamic(hs, cc, dep_info, extra_srcs, objects_dir, my_pkg_id):
"""Link a dynamic library for the package using given object files.
Returns:
File: Produced dynamic library.
"""
dynamic_library = hs.actions.declare_file(
"lib{0}-ghc{1}.{2}".format(
pkg_id.library_name(hs, my_pkg_id),
hs.toolchain.version,
_so_extension(hs),
),
)
args = hs.actions.args()
args.add_all(["-optl" + f for f in cc.linker_flags])
args.add_all(["-shared", "-dynamic"])
# Work around macOS linker limits. This fix has landed in GHC HEAD, but is
# not yet in a release; plus, we still want to support older versions of
# GHC. For details, see: https://phabricator.haskell.org/D4714
if hs.toolchain.is_darwin:
args.add("-optl-Wl,-dead_strip_dylibs")
args.add_all(pkg_info_to_compile_flags(expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = None,
custom_package_databases = None,
version = my_pkg_id.version if my_pkg_id else None,
)))
(cc_link_libs, _cc_solibs, _hs_solibs) = _link_dependencies(
hs = hs,
dep_info = dep_info,
dynamic = True,
binary = dynamic_library,
args = args,
)
args.add_all(["-o", dynamic_library.path])
# Profiling not supported for dynamic libraries.
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = True,
with_profiling = False,
)
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = depset([objects_dir], transitive = [
depset(extra_srcs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.dynamic_libraries),
cc_link_libs,
]),
outputs = [dynamic_library],
mnemonic = "HaskellLinkDynamicLibrary",
arguments = args,
params_file = objects_dir_manifest,
)
return dynamic_library


@@ -0,0 +1,210 @@
"""Action for creating packages and registering them with ghc-pkg"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/path_utils.bzl", "target_unique_name")
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/path_utils.bzl", "get_lib_name")
def _get_extra_libraries(dep_info):
"""Get directories and library names for extra library dependencies.
Args:
dep_info: HaskellInfo provider of the package.
Returns:
(dirs, libs):
dirs: list: Library search directories for extra library dependencies.
libs: list: Extra library dependencies.
"""
cc_libs = dep_info.cc_dependencies.dynamic_linking.libraries_to_link.to_list()
# The order in which library dependencies are listed is relevant when
# linking static archives. To maintain the order defined by the input
# depset we collect the library dependencies in a list, and use a separate
# set to deduplicate entries.
seen_libs = set.empty()
extra_libs = []
extra_lib_dirs = set.empty()
for lib in cc_libs:
lib_name = get_lib_name(lib)
if not set.is_member(seen_libs, lib_name):
set.mutable_insert(seen_libs, lib_name)
extra_libs.append(lib_name)
set.mutable_insert(extra_lib_dirs, lib.dirname)
return (set.to_list(extra_lib_dirs), extra_libs)
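# Sketch with hypothetical inputs ["a/libfoo.so", "b/libbar.a",
# "c/libfoo.so"]: the second "foo" is skipped entirely (so directory
# "c" is not recorded either) and the result is
#   (["a", "b"], ["foo", "bar"])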
def package(
hs,
dep_info,
interfaces_dir,
interfaces_dir_prof,
static_library,
dynamic_library,
exposed_modules_file,
other_modules,
my_pkg_id,
static_library_prof):
"""Create GHC package using ghc-pkg.
Args:
hs: Haskell context.
interfaces_dir: Directory containing interface files.
static_library: Static library of the package.
dynamic_library: Dynamic library of the package.
static_library_prof: Static library compiled with profiling or None.
Returns:
(File, File): GHC package conf file, GHC package cache file
"""
pkg_db_dir = pkg_id.to_string(my_pkg_id)
conf_file = hs.actions.declare_file(
paths.join(pkg_db_dir, "{0}.conf".format(pkg_db_dir)),
)
cache_file = hs.actions.declare_file("package.cache", sibling = conf_file)
import_dir = paths.join(
"${pkgroot}",
paths.join(pkg_db_dir, "_iface"),
)
interfaces_dirs = [interfaces_dir]
if interfaces_dir_prof != None:
import_dir_prof = paths.join(
"${pkgroot}",
paths.join(pkg_db_dir, "_iface_prof"),
)
interfaces_dirs.append(interfaces_dir_prof)
else:
import_dir_prof = ""
(extra_lib_dirs, extra_libs) = _get_extra_libraries(dep_info)
metadata_entries = {
"name": my_pkg_id.name,
"version": my_pkg_id.version,
"id": pkg_id.to_string(my_pkg_id),
"key": pkg_id.to_string(my_pkg_id),
"exposed": "True",
"hidden-modules": " ".join(other_modules),
"import-dirs": " ".join([import_dir, import_dir_prof]),
"library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
"dynamic-library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
"hs-libraries": pkg_id.library_name(hs, my_pkg_id),
"extra-libraries": " ".join(extra_libs),
"depends": ", ".join(
# Prebuilt dependencies are added further down, since their
# package-ids are not available as strings but in build outputs.
set.to_list(dep_info.package_ids),
),
}
# Create a file from which ghc-pkg will create the actual package.
# The list of exposed modules is generated below.
metadata_file = hs.actions.declare_file(target_unique_name(hs, "metadata"))
hs.actions.write(
output = metadata_file,
content = "\n".join([
"{0}: {1}".format(k, v)
for k, v in metadata_entries.items()
if v
]) + "\n",
)
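# Sketch of the rendered metadata file for a hypothetical package
# "mylib-0.1.0" (field values depend on the actual target):
#
#   name: mylib
#   version: 0.1.0
#   id: mylib-0.1.0
#   key: mylib-0.1.0
#   exposed: True
#   import-dirs: ${pkgroot}/mylib-0.1.0/_iface
#   library-dirs: ${pkgroot}
#   dynamic-library-dirs: ${pkgroot}
#   hs-libraries: HSmylib-0.1.0
#
# Empty values are dropped by the `if v` filter above; exposed-modules
# and the package ids of prebuilt dependencies are appended by the
# shell action below.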
# Collect the package id files of all prebuilt dependencies.
prebuilt_deps_id_files = [
dep.id_file
for dep in set.to_list(dep_info.prebuilt_dependencies)
]
# Combine exposed modules and other metadata to form the package
# configuration file.
prebuilt_deps_args = hs.actions.args()
prebuilt_deps_args.add_all([f.path for f in prebuilt_deps_id_files])
prebuilt_deps_args.use_param_file("%s", use_always = True)
prebuilt_deps_args.set_param_file_format("multiline")
hs.actions.run_shell(
inputs = [metadata_file, exposed_modules_file] + prebuilt_deps_id_files,
outputs = [conf_file],
command = """
cat $1 > $4
echo "exposed-modules: `cat $2`" >> $4
# this is equivalent to 'readarray'. We don't use 'readarray' in order
# to support older bash versions.
while IFS= read -r line; do deps_id_files+=("$line"); done < $3
if [ ${#deps_id_files[@]} -eq 0 ]; then
deps=""
else
deps=$(cat "${deps_id_files[@]}" | tr '\n' " ")
fi
echo "depends: $deps" >> $4
""",
arguments = [
metadata_file.path,
exposed_modules_file.path,
prebuilt_deps_args,
conf_file.path,
],
use_default_shell_env = True,
)
# Make the call to ghc-pkg and use the package configuration file
package_path = ":".join([c.dirname for c in set.to_list(dep_info.package_databases)]) + ":"
hs.actions.run(
inputs = depset(transitive = [
set.to_depset(dep_info.package_databases),
depset(interfaces_dirs),
depset([
input
for input in [
static_library,
conf_file,
dynamic_library,
static_library_prof,
]
if input
]),
]),
outputs = [cache_file],
env = {
"GHC_PACKAGE_PATH": package_path,
},
mnemonic = "HaskellRegisterPackage",
progress_message = "HaskellRegisterPackage {}".format(hs.label),
executable = hs.tools.ghc_pkg,
# Registration of a new package consists of:
#
# 1. copying the registration file into the package db,
# 2. performing some validation on the registration file content,
# 3. recaching, i.e. regenerating the package db cache file.
#
# Normally, this is all done by `ghc-pkg register`. But in our
# case, `ghc-pkg register` is painful, because the validation
# it performs is slow, somewhat redundant but especially, too
# strict (see e.g.
# https://ghc.haskell.org/trac/ghc/ticket/15478). So we do (1)
# and (3) manually, by copying then calling `ghc-pkg recache`
# directly.
#
# The downside is that we lose the few useful validations that
# `ghc-pkg register` does perform, e.g. when reexporting modules it
# checks that the source module actually exists.
#
# TODO Go back to using `ghc-pkg register`. Blocked by
# https://ghc.haskell.org/trac/ghc/ticket/15478
arguments = [
"recache",
"--package-db={0}".format(conf_file.dirname),
"-v0",
"--no-expand-pkgroot",
],
# XXX: Seems required for this to work on Windows
use_default_shell_env = True,
)
return conf_file, cache_file


@@ -0,0 +1,175 @@
"""GHCi REPL support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load(
":private/set.bzl",
"set",
)
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_repl(
hs,
ghci_script,
ghci_repl_wrapper,
user_compile_flags,
repl_ghci_args,
hs_info,
output,
package_databases,
version,
lib_info = None):
"""Build REPL script.
Args:
hs: Haskell context.
hs_info: HaskellInfo.
package_databases: package caches excluding the cache file of the package
we're creating a REPL for.
lib_info: If we're building REPL for a library target, pass
HaskellLibraryInfo here, otherwise it should be None.
Returns:
None.
"""
# The base and directory packages are necessary for the GHCi script we use
# (it loads source files and brings the corresponding modules into scope).
args = ["-package", "base", "-package", "directory"]
pkg_ghc_info = expose_packages(
hs_info,
lib_info,
use_direct = False,
use_my_pkg_id = None,
custom_package_databases = package_databases,
version = version,
)
args += pkg_info_to_compile_flags(pkg_ghc_info)
lib_imports = []
if lib_info != None:
for idir in set.to_list(hs_info.import_dirs):
args += ["-i{0}".format(idir)]
lib_imports.append(idir)
link_ctx = hs_info.cc_dependencies.dynamic_linking
libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()
# External C libraries that we need to make available to the REPL.
libraries = link_libraries(libs_to_link, args)
# Transitive library dependencies to have in runfiles.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
hs_info.transitive_cc_dependencies,
path_prefix = "$RULES_HASKELL_EXEC_ROOT",
)
library_path = [paths.dirname(lib.path) for lib in library_deps]
ld_library_path = [paths.dirname(lib.path) for lib in ld_library_deps]
repl_file = hs.actions.declare_file(target_unique_name(hs, "repl"))
add_sources = ["*" + f.path for f in set.to_list(hs_info.source_files)]
ghci_repl_script = hs.actions.declare_file(
target_unique_name(hs, "ghci-repl-script"),
)
hs.actions.expand_template(
template = ghci_script,
output = ghci_repl_script,
substitutions = {
"{ADD_SOURCES}": " ".join(add_sources),
"{COMMANDS}": "",
},
)
# Extra arguments.
# `compiler flags` is the default set of arguments for the repl,
# augmented by `repl_ghci_args`.
# The ordering is important, first compiler flags (from toolchain
# and local rule), then from `repl_ghci_args`. This way the more
# specific arguments are listed last, and then have more priority in
# GHC.
# Note that most GHCi flags have a negating counterpart, so a
# negative flag in `repl_ghci_args` can disable a positive flag set
# in `user_compile_flags`; e.g. `-XNoOverloadedStrings` will disable
# `-XOverloadedStrings`.
args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args + repl_ghci_args
hs.actions.expand_template(
template = ghci_repl_wrapper,
output = repl_file,
substitutions = {
"{ENV}": render_env(ghc_env),
"{TOOL}": hs.tools.ghci.path,
"{ARGS}": " ".join(
[
"-ghci-script",
paths.join("$RULES_HASKELL_EXEC_ROOT", ghci_repl_script.path),
] + [
shell.quote(a)
for a in args
],
),
},
is_executable = True,
)
ghc_info = struct(
has_version = pkg_ghc_info.has_version,
library_path = library_path,
ld_library_path = ld_library_path,
packages = pkg_ghc_info.packages,
package_ids = pkg_ghc_info.package_ids,
package_dbs = pkg_ghc_info.package_dbs,
lib_imports = lib_imports,
libraries = libraries,
execs = struct(
ghc = hs.tools.ghc.path,
ghci = hs.tools.ghci.path,
runghc = hs.tools.runghc.path,
),
flags = struct(
compiler = user_compile_flags,
toolchain_compiler = hs.toolchain.compiler_flags,
repl = repl_ghci_args,
toolchain_repl = hs.toolchain.repl_ghci_args,
),
)
ghc_info_file = hs.actions.declare_file(
target_unique_name(hs, "ghc-info"),
)
hs.actions.write(
output = ghc_info_file,
content = ghc_info.to_json(),
)
# XXX We create a symlink here because we need to force hs.tools.ghci
# and ghci_script to be included as inputs, and the best way to do that
# is to use hs.actions.run. That action, in turn, must produce a
# result, so using ln seems to be the only sane choice.
extra_inputs = depset(transitive = [
depset([
hs.tools.ghci,
ghci_repl_script,
repl_file,
ghc_info_file,
]),
set.to_depset(package_databases),
depset(library_deps),
depset(ld_library_deps),
set.to_depset(hs_info.source_files),
])
ln(hs, repl_file, output, extra_inputs)


@@ -0,0 +1,115 @@
"""runghc support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(
":private/set.bzl",
"set",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_runghc(
hs,
runghc_wrapper,
user_compile_flags,
extra_args,
hs_info,
output,
package_databases,
version,
lib_info = None):
"""Build runghc script.
Args:
hs: Haskell context.
hs_info: HaskellInfo.
package_databases: package caches excluding the cache file of the package
we're creating a runghc for.
lib_info: If we're building runghc for a library target, pass
HaskellLibraryInfo here, otherwise it should be None.
Returns:
None.
"""
args = pkg_info_to_compile_flags(expose_packages(
hs_info,
lib_info,
use_direct = False,
use_my_pkg_id = None,
custom_package_databases = package_databases,
version = version,
))
if lib_info != None:
for idir in set.to_list(hs_info.import_dirs):
args += ["-i{0}".format(idir)]
link_ctx = hs_info.cc_dependencies.dynamic_linking
libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()
# External C libraries that we need to make available to runghc.
link_libraries(libs_to_link, args)
# Transitive library dependencies to have in runfiles.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
hs_info.transitive_cc_dependencies,
path_prefix = "$RULES_HASKELL_EXEC_ROOT",
)
runghc_file = hs.actions.declare_file(target_unique_name(hs, "runghc"))
# Extra arguments.
# `compiler flags` is the default set of arguments for runghc,
# augmented by `extra_args`.
# The ordering is important, first compiler flags (from toolchain
# and local rule), then from `extra_args`. This way the more
# specific arguments are listed last, and then have more priority in
# GHC.
# Note that most GHCi flags have a negating counterpart, so a
# negative flag in `extra_args` can disable a positive flag set
# in `user_compile_flags`; e.g. `-XNoOverloadedStrings` will disable
# `-XOverloadedStrings`.
args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args
# ghc args need to be wrapped up in "--ghc-arg=" when passing to runghc
runcompile_flags = ["--ghc-arg=%s" % a for a in args]
runcompile_flags += extra_args
hs.actions.expand_template(
template = runghc_wrapper,
output = runghc_file,
substitutions = {
"{ENV}": render_env(ghc_env),
"{TOOL}": hs.tools.runghc.path,
"{ARGS}": " ".join([shell.quote(a) for a in runcompile_flags]),
},
is_executable = True,
)
# XXX We create a symlink here because we need to force hs.tools.runghc
# to be included as an input, and the best way to do that is to use
# hs.actions.run. That action, in turn, must produce a result, so
# using ln seems to be the only sane choice.
extra_inputs = depset(transitive = [
depset([
hs.tools.runghc,
runghc_file,
]),
set.to_depset(package_databases),
depset(library_deps),
depset(ld_library_deps),
set.to_depset(hs_info.source_files),
])
ln(hs, runghc_file, output, extra_inputs)


@@ -0,0 +1,64 @@
"""Derived context with Haskell-specific fields and methods"""
load("@bazel_skylib//lib:paths.bzl", "paths")
HaskellContext = provider()
def haskell_context(ctx, attr = None):
toolchain = ctx.toolchains["@io_tweag_rules_haskell//haskell:toolchain"]
if not attr:
attr = ctx.attr
if hasattr(attr, "src_strip_prefix"):
src_strip_prefix = attr.src_strip_prefix
else:
src_strip_prefix = ""
src_root = paths.join(
ctx.label.workspace_root,
ctx.label.package,
src_strip_prefix,
)
env = {
"LANG": toolchain.locale,
}
if toolchain.locale_archive != None:
env["LOCALE_ARCHIVE"] = toolchain.locale_archive.path
coverage_enabled = False
if hasattr(ctx, "configuration"):
coverage_enabled = ctx.configuration.coverage_enabled
return HaskellContext(
# Fields
name = attr.name,
label = ctx.label,
toolchain = toolchain,
tools = toolchain.tools,
src_root = src_root,
package_root = ctx.label.workspace_root + ctx.label.package,
env = env,
mode = ctx.var["COMPILATION_MODE"],
actions = ctx.actions,
bin_dir = ctx.bin_dir,
genfiles_dir = ctx.genfiles_dir,
coverage_enabled = coverage_enabled,
)
def render_env(env):
"""Render environment dict to shell exports.
Example:
>>> render_env({"PATH": "foo:bar", "LANG": "lang"})
export PATH=foo:bar
export LANG=lang
"""
return "\n".join([
"export {}={}".format(k, v)
for k, v in env.items()
])


@@ -0,0 +1,128 @@
#!/usr/bin/env bash
# A wrapper for Haskell binaries which have been instrumented for hpc code coverage.
# Copy-pasted from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
set -euo pipefail
if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
if [[ -f "$0.runfiles_manifest" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.runfiles/MANIFEST" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
export RUNFILES_DIR="$0.runfiles"
fi
fi
if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
"$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
else
echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
exit 1
fi
# --- end runfiles.bash initialization ---
ERRORCOLOR='\033[1;31m'
CLEARCOLOR='\033[0m'
binary_path=$(rlocation {binary_path})
hpc_path=$(rlocation {hpc_path})
tix_file_path={tix_file_path}
coverage_report_format={coverage_report_format}
strict_coverage_analysis={strict_coverage_analysis}
package_path={package_path}
# Either of the two expected coverage metrics should be set to -1 if it is meant to be unused.
expected_covered_expressions_percentage={expected_covered_expressions_percentage}
expected_uncovered_expression_count={expected_uncovered_expression_count}
# gather the hpc directories
hpc_dir_args=""
mix_file_paths={mix_file_paths}
for m in "${mix_file_paths[@]}"
do
absolute_mix_file_path=$(rlocation $m)
hpc_parent_dir=$(dirname $absolute_mix_file_path)
trimmed_hpc_parent_dir=$(echo "${hpc_parent_dir%%.hpc*}")
hpc_dir_args="$hpc_dir_args --hpcdir=$trimmed_hpc_parent_dir.hpc"
done
# gather the modules to exclude from the coverage analysis
hpc_exclude_args=""
modules_to_exclude={modules_to_exclude}
for m in "${modules_to_exclude[@]}"
do
hpc_exclude_args="$hpc_exclude_args --exclude=$m"
done
# run the test binary, and then generate the report
$binary_path "$@" > /dev/null 2>&1
$hpc_path report "$tix_file_path" $hpc_dir_args $hpc_exclude_args \
--srcdir "." --srcdir "$package_path" > __hpc_coverage_report
# if we want a text report, just output the file generated in the previous step
if [ "$coverage_report_format" == "text" ]
then
echo "Overall report"
cat __hpc_coverage_report
fi
# check the covered expression percentage, and if it matches our expectations
if [ "$expected_covered_expressions_percentage" -ne -1 ]
then
covered_expression_percentage=$(grep "expressions used" __hpc_coverage_report | cut -c 1-3)
if [ "$covered_expression_percentage" -lt "$expected_covered_expressions_percentage" ]
then
echo -e "\n==>$ERRORCOLOR Inadequate expression coverage percentage.$CLEARCOLOR"
echo -e "==> Expected $expected_covered_expressions_percentage%, but the actual coverage was $ERRORCOLOR$(($covered_expression_percentage))%$CLEARCOLOR.\n"
exit 1
elif [ "$strict_coverage_analysis" == "True" ] && [ "$covered_expression_percentage" -gt "$expected_covered_expressions_percentage" ]
then
echo -e "\n==>$ERRORCOLOR ** BECAUSE STRICT COVERAGE ANALYSIS IS ENABLED **$CLEARCOLOR"
echo -e "==> Your coverage percentage is now higher than expected.$CLEARCOLOR"
echo -e "==> Expected $expected_covered_expressions_percentage% of expressions covered, but the actual value is $ERRORCOLOR$(($covered_expression_percentage))%$CLEARCOLOR."
echo -e "==> Please increase the expected coverage percentage to match.\n"
exit 1
fi
fi
# check how many uncovered expressions there are, and if that number matches our expectations
if [ "$expected_uncovered_expression_count" -ne -1 ]
then
coverage_numerator=$(grep "expressions used" __hpc_coverage_report | sed s:.*\(::g | cut -f1 -d "/")
coverage_denominator=$(grep "expressions used" __hpc_coverage_report | sed s:.*/::g | cut -f1 -d ")")
uncovered_expression_count="$(($coverage_denominator - $coverage_numerator))"
if [ "$uncovered_expression_count" -gt "$expected_uncovered_expression_count" ]
then
echo -e "\n==>$ERRORCOLOR Too many uncovered expressions.$CLEARCOLOR"
echo -e "==> Expected $expected_uncovered_expression_count uncovered expressions, but the actual count was $ERRORCOLOR$(($uncovered_expression_count))$CLEARCOLOR.\n"
exit 1
elif [ "$strict_coverage_analysis" == "True" ] && [ "$uncovered_expression_count" -lt "$expected_uncovered_expression_count" ]
then
echo -e "\n==>$ERRORCOLOR ** BECAUSE STRICT COVERAGE ANALYSIS IS ENABLED **$CLEARCOLOR"
echo -e "==>$ERRORCOLOR Your uncovered expression count is now lower than expected.$CLEARCOLOR"
echo -e "==> Expected $expected_uncovered_expression_count uncovered expressions, but there is $ERRORCOLOR$(($uncovered_expression_count))$CLEARCOLOR."
echo -e "==> Please lower the expected uncovered expression count to match.\n"
exit 1
fi
fi
# if we want an html report, run the hpc binary again with the "markup" command,
# and feed its generated files into stdout, wrapped in XML tags
if [ "$coverage_report_format" == "html" ]
then
$hpc_path markup "$tix_file_path" $hpc_dir_args $hpc_exclude_args \
--srcdir "." --srcdir "$package_path" --destdir=hpc_out > /dev/null 2>&1
cd hpc_out
echo "COVERAGE REPORT BELOW"
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
for file in *.html **/*.hs.html; do
[ -e "$file" ] || continue
echo "<coverage-report-part name=\"$file\">"
echo '<![CDATA['
cat $file
echo ']]>'
echo "</coverage-report-part>"
done
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
fi


@@ -0,0 +1,222 @@
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellCcInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellPrebuiltPackageInfo",
"empty_HaskellCcInfo",
"merge_HaskellCcInfo",
)
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"is_static_library",
"ln",
)
load(":private/set.bzl", "set")
def _cc_get_static_lib(lib_info):
"""Return the library to use in static linking mode.
This returns the first available library artifact in the following order:
- static_library
- pic_static_library
- dynamic_library
- interface_library
Args:
lib_info: LibraryToLink provider.
Returns:
File: The library to link against in static mode.
"""
if lib_info.static_library:
return lib_info.static_library
elif lib_info.pic_static_library:
return lib_info.pic_static_library
elif lib_info.dynamic_library:
return lib_info.dynamic_library
else:
return lib_info.interface_library
def _cc_get_dynamic_lib(lib_info):
"""Return the library to use in dynamic linking mode.
This returns the first available library artifact in the following order:
- dynamic_library
- interface_library
- pic_static_library
- static_library
Args:
lib_info: LibraryToLink provider.
Returns:
File: The library to link against in dynamic mode.
"""
if lib_info.dynamic_library:
return lib_info.dynamic_library
elif lib_info.interface_library:
return lib_info.interface_library
elif lib_info.pic_static_library:
return lib_info.pic_static_library
else:
return lib_info.static_library
def _HaskellCcInfo_from_CcInfo(ctx, cc_info):
libs_to_link = cc_info.linking_context.libraries_to_link
static_libs_to_link = []
dynamic_libs_to_link = []
static_libs_for_runtime = []
dynamic_libs_for_runtime = []
for l in libs_to_link:
_static_lib = _cc_get_static_lib(l)
dynamic_lib = _cc_get_dynamic_lib(l)
# Bazel itself only mangles dynamic libraries, not static libraries.
# However, we need the library name of the static and dynamic version
# of a library to match so that we can refer to both with one entry in
# the package configuration file. Here we rename any static archive
# whose name does not match the mangled name of its dynamic counterpart.
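# For example (names are hypothetical): if the dynamic library is
# mangled to "libfoo_mangled.so" while the static archive is named
# "libfoo.a", the archive is symlinked to "libfoo_mangled.a" so that
# both resolve to the library name "foo_mangled".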
static_name = get_lib_name(_static_lib)
dynamic_name = get_lib_name(dynamic_lib)
if static_name != dynamic_name:
ext = _static_lib.extension
static_lib = ctx.actions.declare_file(
"lib%s.%s" % (dynamic_name, ext),
)
ln(ctx, _static_lib, static_lib)
else:
static_lib = _static_lib
static_libs_to_link.append(static_lib)
if is_shared_library(static_lib):
static_libs_for_runtime.append(static_lib)
dynamic_libs_to_link.append(dynamic_lib)
if is_shared_library(dynamic_lib):
dynamic_libs_for_runtime.append(dynamic_lib)
return HaskellCcInfo(
static_linking = struct(
libraries_to_link = depset(
direct = static_libs_to_link,
order = "topological",
),
dynamic_libraries_for_runtime = depset(
direct = static_libs_for_runtime,
order = "topological",
),
user_link_flags = depset(
direct = cc_info.linking_context.user_link_flags,
order = "topological",
),
),
dynamic_linking = struct(
libraries_to_link = depset(
direct = dynamic_libs_to_link,
order = "topological",
),
dynamic_libraries_for_runtime = depset(
direct = dynamic_libs_for_runtime,
order = "topological",
),
user_link_flags = depset(
direct = cc_info.linking_context.user_link_flags,
order = "topological",
),
),
)
def gather_dep_info(ctx, deps):
"""Collapse dependencies into a single `HaskellInfo`.
Note that the field `prebuilt_dependencies` also includes the
prebuilt dependencies of the current target.
Args:
ctx: Rule context.
deps: deps attribute.
Returns:
HaskellInfo: Unified information about all dependencies.
"""
acc = HaskellInfo(
package_ids = set.empty(),
package_databases = set.empty(),
version_macros = set.empty(),
static_libraries = [],
static_libraries_prof = [],
dynamic_libraries = set.empty(),
interface_dirs = set.empty(),
prebuilt_dependencies = set.empty(),
direct_prebuilt_deps = set.empty(),
cc_dependencies = empty_HaskellCcInfo(),
transitive_cc_dependencies = empty_HaskellCcInfo(),
)
for dep in deps:
if HaskellInfo in dep:
binfo = dep[HaskellInfo]
package_ids = acc.package_ids
if HaskellLibraryInfo not in dep:
fail("Target {0} cannot depend on binary".format(ctx.attr.name))
if HaskellLibraryInfo in dep:
set.mutable_insert(package_ids, dep[HaskellLibraryInfo].package_id)
acc = HaskellInfo(
package_ids = package_ids,
package_databases = set.mutable_union(acc.package_databases, binfo.package_databases),
version_macros = set.mutable_union(acc.version_macros, binfo.version_macros),
static_libraries = acc.static_libraries + binfo.static_libraries,
static_libraries_prof = acc.static_libraries_prof + binfo.static_libraries_prof,
dynamic_libraries = set.mutable_union(acc.dynamic_libraries, binfo.dynamic_libraries),
interface_dirs = set.mutable_union(acc.interface_dirs, binfo.interface_dirs),
prebuilt_dependencies = set.mutable_union(acc.prebuilt_dependencies, binfo.prebuilt_dependencies),
direct_prebuilt_deps = acc.direct_prebuilt_deps,
cc_dependencies = acc.cc_dependencies,
transitive_cc_dependencies = merge_HaskellCcInfo(acc.transitive_cc_dependencies, binfo.transitive_cc_dependencies),
)
elif HaskellPrebuiltPackageInfo in dep:
pkg = dep[HaskellPrebuiltPackageInfo]
acc = HaskellInfo(
package_ids = acc.package_ids,
package_databases = acc.package_databases,
version_macros = set.mutable_insert(acc.version_macros, pkg.version_macros_file),
static_libraries = acc.static_libraries,
static_libraries_prof = acc.static_libraries_prof,
dynamic_libraries = acc.dynamic_libraries,
interface_dirs = acc.interface_dirs,
prebuilt_dependencies = set.mutable_insert(acc.prebuilt_dependencies, pkg),
direct_prebuilt_deps = set.mutable_insert(acc.direct_prebuilt_deps, pkg),
cc_dependencies = acc.cc_dependencies,
transitive_cc_dependencies = acc.transitive_cc_dependencies,
)
elif CcInfo in dep and HaskellInfo not in dep:
# The final link of a binary must include all static libraries we
# depend on, including transitive ones. These libs are provided
# in the `CcInfo` provider.
hs_cc_info = _HaskellCcInfo_from_CcInfo(ctx, dep[CcInfo])
acc = HaskellInfo(
package_ids = acc.package_ids,
package_databases = acc.package_databases,
version_macros = acc.version_macros,
static_libraries = acc.static_libraries,
static_libraries_prof = acc.static_libraries_prof,
dynamic_libraries = acc.dynamic_libraries,
interface_dirs = acc.interface_dirs,
prebuilt_dependencies = acc.prebuilt_dependencies,
direct_prebuilt_deps = acc.direct_prebuilt_deps,
cc_dependencies = merge_HaskellCcInfo(
acc.cc_dependencies,
hs_cc_info,
),
transitive_cc_dependencies = merge_HaskellCcInfo(
acc.transitive_cc_dependencies,
hs_cc_info,
),
)
return acc

View file

@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# Usage: ghci_repl_wrapper.sh <ARGS>
# this variable is set by `bazel run`
if [ "$BUILD_WORKSPACE_DIRECTORY" = "" ]
then
cat <<EOF
It looks like you are trying to invoke the REPL incorrectly.
We only support calling the repl script with
$ bazel run <target>
for now.
If you are on bazel < 0.15 you must invoke as follows:
$ bazel run --direct_run <target>
EOF
exit 1
fi
# Derived from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
if [[ -z "$RUNFILES_DIR" ]]; then
if [[ -d "$0.runfiles" ]]; then
export RUNFILES_DIR="$0.runfiles"
fi
fi
if [[ -z "$RUNFILES_MANIFEST_FILE" ]]; then
if [[ -f "$0.runfiles_manifest" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.runfiles/MANIFEST" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
fi
fi
# GHCi script and libraries are loaded relative to workspace directory.
# bazel run //some:target@repl will be executed from the workspace directory.
# bazel run //some:haskell_repl will be executed from its execroot.
# Explicitly change into the workspace root in that case.
cd "$BUILD_WORKSPACE_DIRECTORY"
# This is a workaround for https://github.com/bazelbuild/bazel/issues/5506
# and also for the fact that REPL script relies on so-called “convenience
# links” and the names of those links are controlled by the --symlink_prefix
# option, which can be set by the user to something unpredictable.
#
# It seems that we can't locate the files of interest/build outputs in
# general. However, due to “internal issues” in Bazel mentioned e.g.
# https://github.com/bazelbuild/bazel/issues/3796, the directory bazel-out
# is always created under the workspace directory. We exploit this to get
# location of exec root reliably and then prefix locations of various
# components, such as shared libraries with that exec root.
RULES_HASKELL_EXEC_ROOT=$(dirname "$(readlink "${BUILD_WORKSPACE_DIRECTORY}/bazel-out")")
TOOL_LOCATION="$RULES_HASKELL_EXEC_ROOT/{TOOL}"
{ENV}
"$TOOL_LOCATION" {ARGS} "$@"

View file

@ -0,0 +1,49 @@
#!/usr/bin/env bash
#
# Usage: haddock-wrapper.sh <PREBUILT_DEPS_FILE> <HADDOCK_ARGS>
set -eo pipefail
%{env}
PREBUILT_DEPS_FILE=$1
shift
extra_args=()
for pkg in $(< "$PREBUILT_DEPS_FILE")
do
# Assumption: the `haddock-interfaces` field always only contains
# exactly one file name. This seems to hold in practice, though the
# ghc documentation defines it as:
# > (string list) A list of filenames containing Haddock interface files
# > (.haddock files) for this package.
# If there were more than one file, judging by the output of the
# `depends` field, the file names would be separated by spaces.
# https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/packages.html#installedpackageinfo-a-package-specification
haddock_interfaces=$(%{ghc-pkg} --simple-output field $pkg haddock-interfaces)
haddock_html=$(%{ghc-pkg} --simple-output field $pkg haddock-html)
# Sometimes the referenced `.haddock` file does not exist
# (e.g. for `nixpkgs.haskellPackages` deps with haddock disabled).
# In that case, skip this package with a warning.
if [[ -f "$haddock_interfaces" ]]
then
# TODO: link source code,
# `--read-interface=$haddock_html,$pkg_src,$haddock_interfaces`
# https://haskell-haddock.readthedocs.io/en/latest/invoking.html#cmdoption-read-interface
extra_args+=("--read-interface=$haddock_html,$haddock_interfaces")
else
echo "Warning: haddock missing for package $pkg" 1>&2
fi
done
# BSD and GNU mktemp are very different; attempt GNU first
TEMP=$(mktemp -d 2>/dev/null || mktemp -d -t 'haddock_wrapper')
trap cleanup 1 2 3 6
cleanup() { rmdir "$TEMP"; }
# XXX Override TMPDIR to prevent race conditions on certain platforms.
# This is a workaround for
# https://github.com/haskell/haddock/issues/894.
TMPDIR=$TEMP %{haddock} "${extra_args[@]}" "$@"
cleanup

View file

@ -0,0 +1,668 @@
"""Implementation of core Haskell rules"""
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"C2hsLibraryInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellPrebuiltPackageInfo",
)
load(":cc.bzl", "cc_interop_info")
load(
":private/actions/link.bzl",
"link_binary",
"link_library_dynamic",
"link_library_static",
)
load(":private/actions/package.bzl", "package")
load(":private/actions/repl.bzl", "build_haskell_repl")
load(":private/actions/runghc.bzl", "build_haskell_runghc")
load(":private/context.bzl", "haskell_context")
load(":private/dependencies.bzl", "gather_dep_info")
load(":private/java.bzl", "java_interop_info")
load(":private/mode.bzl", "is_profiling_enabled")
load(
":private/path_utils.bzl",
"ln",
"match_label",
"parse_pattern",
"target_unique_name",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/version_macros.bzl", "generate_version_macros")
load(":providers.bzl", "GhcPluginInfo", "HaskellCoverageInfo")
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:collections.bzl", "collections")
load("@bazel_skylib//lib:shell.bzl", "shell")
def _prepare_srcs(srcs):
srcs_files = []
import_dir_map = {}
for src in srcs:
# If it has the "files" attribute, it must be a Target
if hasattr(src, "files"):
if C2hsLibraryInfo in src:
srcs_files += src.files.to_list()
for f in src.files.to_list():
import_dir_map[f] = src[C2hsLibraryInfo].import_dir
else:
srcs_files += src.files.to_list()
# otherwise it's just a file
else:
srcs_files.append(src)
return srcs_files, import_dir_map
def haskell_test_impl(ctx):
return _haskell_binary_common_impl(ctx, is_test = True)
def haskell_binary_impl(ctx):
return _haskell_binary_common_impl(ctx, is_test = False)
def _should_inspect_coverage(ctx, hs, is_test):
return hs.coverage_enabled and is_test
def _coverage_enabled_for_target(coverage_source_patterns, label):
for pat in coverage_source_patterns:
if match_label(pat, label):
return True
return False
# Mix files refer to genfile srcs including their root. Therefore, we
# must condition the src filepaths passed in for coverage to match.
def _condition_coverage_src(hs, src):
if not src.path.startswith(hs.genfiles_dir.path):
return src
""" Genfiles have the genfile directory as part of their path,
so declaring a file with the same path actually makes the new
file double-qualified by the genfile directory.
This is necessary because mix files capture the genfile
path before compilation, and then expect those files to be
qualified by the genfile directory when `hpc report` or
`hpc markup` are used. But, genfiles included as runfiles
are no longer qualified. So, double-qualifying them results in
only one level of qualification as runfiles.
"""
conditioned_src = hs.actions.declare_file(src.path)
hs.actions.run_shell(
inputs = [src],
outputs = [conditioned_src],
arguments = [
src.path,
conditioned_src.path,
],
command = """
mkdir -p $(dirname "$2") && cp "$1" "$2"
""",
)
return conditioned_src
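# For example (hypothetical paths): _condition_coverage_src copies a
# generated source declared at "<genfiles>/foo/Bar.hs" to
# "<genfiles>/<genfiles>/foo/Bar.hs", so that after runfiles strip one
# level of qualification, hpc still finds it at "<genfiles>/foo/Bar.hs"
# as recorded in the mix file.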
def _haskell_binary_common_impl(ctx, is_test):
hs = haskell_context(ctx)
dep_info = gather_dep_info(ctx, ctx.attr.deps)
plugin_dep_info = gather_dep_info(
ctx,
[dep for plugin in ctx.attr.plugins for dep in plugin[GhcPluginInfo].deps],
)
# Add any interop info for other languages.
cc = cc_interop_info(ctx)
java = java_interop_info(ctx)
with_profiling = is_profiling_enabled(hs)
srcs_files, import_dir_map = _prepare_srcs(ctx.attr.srcs)
inspect_coverage = _should_inspect_coverage(ctx, hs, is_test)
c = hs.toolchain.actions.compile_binary(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs = srcs_files,
ls_modules = ctx.executable._ls_modules,
import_dir_map = import_dir_map,
extra_srcs = depset(ctx.files.extra_srcs),
user_compile_flags = ctx.attr.compiler_flags,
dynamic = False if hs.toolchain.is_windows else not ctx.attr.linkstatic,
with_profiling = False,
main_function = ctx.attr.main_function,
version = ctx.attr.version,
inspect_coverage = inspect_coverage,
plugins = ctx.attr.plugins,
)
# gather intermediary code coverage instrumentation data
coverage_data = c.coverage_data
for dep in ctx.attr.deps:
if HaskellCoverageInfo in dep:
coverage_data += dep[HaskellCoverageInfo].coverage_data
c_p = None
if with_profiling:
c_p = hs.toolchain.actions.compile_binary(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs = srcs_files,
ls_modules = ctx.executable._ls_modules,
import_dir_map = import_dir_map,
# NOTE We must make the object files compiled without profiling
# available to this step for TH to work, presumably because GHC is
# linked against RTS without profiling.
extra_srcs = depset(transitive = [
depset(ctx.files.extra_srcs),
depset([c.objects_dir]),
]),
user_compile_flags = ctx.attr.compiler_flags,
# NOTE We can't have profiling and dynamic code at the
# same time, see:
# https://ghc.haskell.org/trac/ghc/ticket/15394
dynamic = False,
with_profiling = True,
main_function = ctx.attr.main_function,
version = ctx.attr.version,
plugins = ctx.attr.plugins,
)
(binary, solibs) = link_binary(
hs,
cc,
dep_info,
ctx.files.extra_srcs,
ctx.attr.compiler_flags,
c_p.objects_dir if with_profiling else c.objects_dir,
dynamic = False if hs.toolchain.is_windows else not ctx.attr.linkstatic,
with_profiling = with_profiling,
version = ctx.attr.version,
)
hs_info = HaskellInfo(
package_ids = dep_info.package_ids,
package_databases = dep_info.package_databases,
version_macros = set.empty(),
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
static_libraries = dep_info.static_libraries,
static_libraries_prof = dep_info.static_libraries_prof,
dynamic_libraries = dep_info.dynamic_libraries,
interface_dirs = dep_info.interface_dirs,
compile_flags = c.compile_flags,
prebuilt_dependencies = dep_info.prebuilt_dependencies,
cc_dependencies = dep_info.cc_dependencies,
transitive_cc_dependencies = dep_info.transitive_cc_dependencies,
)
cc_info = cc_common.merge_cc_infos(
cc_infos = [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep],
)
target_files = depset([binary])
build_haskell_repl(
hs,
ghci_script = ctx.file._ghci_script,
ghci_repl_wrapper = ctx.file._ghci_repl_wrapper,
user_compile_flags = ctx.attr.compiler_flags,
repl_ghci_args = ctx.attr.repl_ghci_args,
output = ctx.outputs.repl,
package_databases = dep_info.package_databases,
version = ctx.attr.version,
hs_info = hs_info,
)
# XXX Temporary backwards compatibility hack. Remove eventually.
# See https://github.com/tweag/rules_haskell/pull/460.
ln(hs, ctx.outputs.repl, ctx.outputs.repl_deprecated)
build_haskell_runghc(
hs,
runghc_wrapper = ctx.file._ghci_repl_wrapper,
extra_args = ctx.attr.runcompile_flags,
user_compile_flags = ctx.attr.compiler_flags,
output = ctx.outputs.runghc,
package_databases = dep_info.package_databases,
version = ctx.attr.version,
hs_info = hs_info,
)
executable = binary
extra_runfiles = []
if inspect_coverage:
binary_path = paths.join(ctx.workspace_name, binary.short_path)
hpc_path = paths.join(ctx.workspace_name, hs.toolchain.tools.hpc.short_path)
tix_file_path = hs.label.name + ".tix"
mix_file_paths = [
paths.join(ctx.workspace_name, datum.mix_file.short_path)
for datum in coverage_data
]
mix_file_paths = collections.uniq(mix_file_paths) # remove duplicates
# find which modules to exclude from coverage analysis, by using the specified source patterns
raw_coverage_source_patterns = ctx.attr.experimental_coverage_source_patterns
coverage_source_patterns = [parse_pattern(ctx, pat) for pat in raw_coverage_source_patterns]
modules_to_exclude = [paths.split_extension(datum.mix_file.basename)[0] for datum in coverage_data if not _coverage_enabled_for_target(coverage_source_patterns, datum.target_label)]
modules_to_exclude = collections.uniq(modules_to_exclude) # remove duplicates
expected_covered_expressions_percentage = ctx.attr.expected_covered_expressions_percentage
expected_uncovered_expression_count = ctx.attr.expected_uncovered_expression_count
strict_coverage_analysis = ctx.attr.strict_coverage_analysis
coverage_report_format = ctx.attr.coverage_report_format
if coverage_report_format != "text" and coverage_report_format != "html":
fail("""haskell_test attribute "coverage_report_format" must be one of "text" or "html".""")
wrapper = hs.actions.declare_file("{}_coverage/coverage_wrapper.sh".format(ctx.label.name))
ctx.actions.expand_template(
template = ctx.file._coverage_wrapper_template,
output = wrapper,
substitutions = {
"{binary_path}": shell.quote(binary_path),
"{hpc_path}": shell.quote(hpc_path),
"{tix_file_path}": shell.quote(tix_file_path),
"{expected_covered_expressions_percentage}": str(expected_covered_expressions_percentage),
"{expected_uncovered_expression_count}": str(expected_uncovered_expression_count),
"{mix_file_paths}": shell.array_literal(mix_file_paths),
"{modules_to_exclude}": shell.array_literal(modules_to_exclude),
"{strict_coverage_analysis}": str(strict_coverage_analysis),
"{coverage_report_format}": shell.quote(ctx.attr.coverage_report_format),
"{package_path}": shell.quote(ctx.label.package),
},
is_executable = True,
)
executable = wrapper
mix_runfiles = [datum.mix_file for datum in coverage_data]
srcs_runfiles = [_condition_coverage_src(hs, datum.src_file) for datum in coverage_data]
extra_runfiles = [
ctx.file._bash_runfiles,
hs.toolchain.tools.hpc,
binary,
] + mix_runfiles + srcs_runfiles
return [
hs_info,
cc_info,
DefaultInfo(
executable = executable,
files = target_files,
runfiles = ctx.runfiles(
files =
solibs +
extra_runfiles,
collect_data = True,
),
),
]
def haskell_library_impl(ctx):
hs = haskell_context(ctx)
dep_info = gather_dep_info(ctx, ctx.attr.deps)
plugin_dep_info = gather_dep_info(
ctx,
[dep for plugin in ctx.attr.plugins for dep in plugin[GhcPluginInfo].deps],
)
version = ctx.attr.version if ctx.attr.version else None
my_pkg_id = pkg_id.new(ctx.label, version)
with_profiling = is_profiling_enabled(hs)
with_shared = False if hs.toolchain.is_windows else not ctx.attr.linkstatic
# Add any interop info for other languages.
cc = cc_interop_info(ctx)
java = java_interop_info(ctx)
srcs_files, import_dir_map = _prepare_srcs(ctx.attr.srcs)
other_modules = ctx.attr.hidden_modules
exposed_modules_reexports = _exposed_modules_reexports(ctx.attr.exports)
c = hs.toolchain.actions.compile_library(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs = srcs_files,
ls_modules = ctx.executable._ls_modules,
other_modules = other_modules,
exposed_modules_reexports = exposed_modules_reexports,
import_dir_map = import_dir_map,
extra_srcs = depset(ctx.files.extra_srcs),
user_compile_flags = ctx.attr.compiler_flags,
with_shared = with_shared,
with_profiling = False,
my_pkg_id = my_pkg_id,
plugins = ctx.attr.plugins,
)
c_p = None
if with_profiling:
c_p = hs.toolchain.actions.compile_library(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs = srcs_files,
ls_modules = ctx.executable._ls_modules,
other_modules = other_modules,
exposed_modules_reexports = exposed_modules_reexports,
import_dir_map = import_dir_map,
# NOTE We must make the object files compiled without profiling
# available to this step for TH to work, presumably because GHC is
# linked against RTS without profiling.
extra_srcs = depset(transitive = [
depset(ctx.files.extra_srcs),
depset([c.objects_dir]),
]),
user_compile_flags = ctx.attr.compiler_flags,
# NOTE We can't have profiling and dynamic code at the
# same time, see:
# https://ghc.haskell.org/trac/ghc/ticket/15394
with_shared = False,
with_profiling = True,
my_pkg_id = my_pkg_id,
plugins = ctx.attr.plugins,
)
static_library = link_library_static(
hs,
cc,
dep_info,
c.objects_dir,
my_pkg_id,
with_profiling = False,
)
if with_shared:
dynamic_library = link_library_dynamic(
hs,
cc,
dep_info,
depset(ctx.files.extra_srcs),
c.objects_dir,
my_pkg_id,
)
dynamic_libraries = set.insert(
dep_info.dynamic_libraries,
dynamic_library,
)
else:
dynamic_library = None
dynamic_libraries = dep_info.dynamic_libraries
static_library_prof = None
if with_profiling:
static_library_prof = link_library_static(
hs,
cc,
dep_info,
c_p.objects_dir,
my_pkg_id,
with_profiling = True,
)
conf_file, cache_file = package(
hs,
dep_info,
c.interfaces_dir,
c_p.interfaces_dir if c_p != None else None,
static_library,
dynamic_library,
c.exposed_modules_file,
other_modules,
my_pkg_id,
static_library_prof = static_library_prof,
)
static_libraries_prof = dep_info.static_libraries_prof
if static_library_prof != None:
static_libraries_prof = [static_library_prof] + dep_info.static_libraries_prof
interface_dirs = set.union(
dep_info.interface_dirs,
set.singleton(c.interfaces_dir),
)
if c_p != None:
interface_dirs = set.mutable_union(
interface_dirs,
set.singleton(c_p.interfaces_dir),
)
version_macros = set.empty()
if version != None:
version_macros = set.singleton(
generate_version_macros(ctx, hs.name, version),
)
hs_info = HaskellInfo(
package_ids = set.insert(dep_info.package_ids, pkg_id.to_string(my_pkg_id)),
package_databases = set.insert(dep_info.package_databases, cache_file),
version_macros = version_macros,
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
# NOTE We have to use lists for static libraries because the order
# matters to the linker: it searches for unresolved symbols from left
# to right, so a library with unresolved symbols must come before the
# library that resolves them.
static_libraries = [static_library] + dep_info.static_libraries,
static_libraries_prof = static_libraries_prof,
dynamic_libraries = dynamic_libraries,
interface_dirs = interface_dirs,
compile_flags = c.compile_flags,
prebuilt_dependencies = dep_info.prebuilt_dependencies,
cc_dependencies = dep_info.cc_dependencies,
transitive_cc_dependencies = dep_info.transitive_cc_dependencies,
)
lib_info = HaskellLibraryInfo(
package_id = pkg_id.to_string(my_pkg_id),
version = version,
)
dep_coverage_data = []
for dep in ctx.attr.deps:
if HaskellCoverageInfo in dep:
dep_coverage_data += dep[HaskellCoverageInfo].coverage_data
coverage_info = HaskellCoverageInfo(
coverage_data = dep_coverage_data + c.coverage_data,
)
target_files = depset([file for file in [static_library, dynamic_library] if file])
if hasattr(ctx, "outputs"):
build_haskell_repl(
hs,
ghci_script = ctx.file._ghci_script,
ghci_repl_wrapper = ctx.file._ghci_repl_wrapper,
repl_ghci_args = ctx.attr.repl_ghci_args,
user_compile_flags = ctx.attr.compiler_flags,
output = ctx.outputs.repl,
package_databases = dep_info.package_databases,
version = ctx.attr.version,
hs_info = hs_info,
lib_info = lib_info,
)
# XXX Temporary backwards compatibility hack. Remove eventually.
# See https://github.com/tweag/rules_haskell/pull/460.
ln(hs, ctx.outputs.repl, ctx.outputs.repl_deprecated)
build_haskell_runghc(
hs,
runghc_wrapper = ctx.file._ghci_repl_wrapper,
extra_args = ctx.attr.runcompile_flags,
user_compile_flags = ctx.attr.compiler_flags,
output = ctx.outputs.runghc,
package_databases = dep_info.package_databases,
version = ctx.attr.version,
hs_info = hs_info,
lib_info = lib_info,
)
default_info = None
if hasattr(ctx, "runfiles"):
default_info = DefaultInfo(
files = target_files,
runfiles = ctx.runfiles(collect_data = True),
)
else:
default_info = DefaultInfo(
files = target_files,
)
# Create a CcInfo provider so that CC rules can work with
# a haskell library as if it were a regular CC one.
# XXX Workaround https://github.com/bazelbuild/bazel/issues/6874.
# Should be find_cpp_toolchain() instead.
cc_toolchain = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
feature_configuration = cc_common.configure_features(
cc_toolchain = cc_toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
)
library_to_link = cc_common.create_library_to_link(
actions = ctx.actions,
feature_configuration = feature_configuration,
dynamic_library = dynamic_library,
static_library = static_library,
cc_toolchain = cc_toolchain,
)
compilation_context = cc_common.create_compilation_context()
linking_context = cc_common.create_linking_context(
libraries_to_link = [library_to_link],
)
cc_info = cc_common.merge_cc_infos(
cc_infos = [
CcInfo(
compilation_context = compilation_context,
linking_context = linking_context,
),
] + [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep],
)
return [
hs_info,
cc_info,
coverage_info,
default_info,
lib_info,
]
def haskell_toolchain_library_impl(ctx):
hs = haskell_context(ctx)
if ctx.attr.package:
package = ctx.attr.package
else:
package = ctx.label.name
id_file = hs.actions.declare_file(target_unique_name(hs, "id"))
hs.actions.run_shell(
inputs = [hs.tools.ghc_pkg],
outputs = [id_file],
command = """
"$1" --simple-output -v1 field "$2" id > "$3"
""",
arguments = [
hs.tools.ghc_pkg.path,
package,
id_file.path,
],
)
version_macros_file = hs.actions.declare_file("{}_version_macros.h".format(hs.name))
hs.actions.run_shell(
inputs = [hs.tools.ghc_pkg, ctx.executable._version_macros],
outputs = [version_macros_file],
command = """
"$1" \\
`"$2" --simple-output -v1 field "$3" name` \\
`"$2" --simple-output -v1 field "$3" version` \\
> "$4"
""",
arguments = [
ctx.executable._version_macros.path,
hs.tools.ghc_pkg.path,
package,
version_macros_file.path,
],
)
prebuilt_package_info = HaskellPrebuiltPackageInfo(
package = package,
id_file = id_file,
version_macros_file = version_macros_file,
)
return [prebuilt_package_info]
def _exposed_modules_reexports(exports):
"""Creates a ghc-pkg-compatible list of reexport declarations.
A ghc-pkg registration file declares reexports as part of the
exposed-modules field in the following format:
exposed-modules: A, B, C from pkg-c:C, D from pkg-d:Original.D
Here, the Original.D module from pkg-d is renamed by virtue of a
different name being used before the "from" keyword.
This function creates a ghc-pkg-compatible list of reexport declarations
(as shown above) from a dictionary mapping package targets to "Cabal-style"
reexported-modules declarations. That is, something like:
{
":pkg-c": "C",
":pkg-d": "Original.D as D",
":pkg-e": "E1, Original.E2 as E2",
}
Args:
exports: a dictionary mapping package targets to "Cabal-style"
reexported-modules declarations.
Returns:
a ghc-pkg-compatible list of reexport declarations.
"""
exposed_reexports = []
for dep, cabal_decls in exports.items():
for cabal_decl in cabal_decls.split(","):
stripped_cabal_decl = cabal_decl.strip()
cabal_decl_parts = stripped_cabal_decl.split(" as ")
original = cabal_decl_parts[0]
if len(cabal_decl_parts) == 2:
reexported = cabal_decl_parts[1]
else:
reexported = cabal_decl_parts[0]
if HaskellPrebuiltPackageInfo in dep:
pkg = dep[HaskellPrebuiltPackageInfo].package
elif HaskellLibraryInfo in dep:
pkg = dep[HaskellLibraryInfo].package_id
exposed_reexport = "{reexported} from {pkg}:{original}".format(
reexported = reexported,
pkg = pkg,
original = original,
)
exposed_reexports.append(exposed_reexport)
return exposed_reexports
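# For example (the package id is hypothetical):
# _exposed_modules_reexports({":pkg-d": "Original.D as D"}), where the
# package id of :pkg-d is "pkg-d-0.1.0", yields
# ["D from pkg-d-0.1.0:Original.D"].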

View file

@ -0,0 +1,48 @@
"""Interop with Java."""
load("@bazel_skylib//lib:collections.bzl", "collections")
JavaInteropInfo = provider(
doc = "Information needed for interop with Java rules.",
fields = {
"inputs": "Files needed during build.",
"env": "Dict with env variables that should be set during build.",
},
)
def java_interop_info(ctx):
"""Gather information from any Java dependencies.
Args:
ctx: Rule context.
Returns:
JavaInteropInfo: Information needed for Java interop.
"""
inputs = depset(
transitive = [
# We only expose direct dependencies, though we could
# expose transitive ones as well. Only exposing the direct
# ones corresponds to Bazel's "strict Java dependencies"
# mode. See
# https://github.com/tweag/rules_haskell/issues/96.
dep[JavaInfo].compile_jars
for dep in ctx.attr.deps
if JavaInfo in dep
],
)
env_dict = dict()
uniq_classpath = collections.uniq([
f.path
for f in inputs
])
if len(uniq_classpath) > 0:
env_dict["CLASSPATH"] = ":".join(uniq_classpath)
return JavaInteropInfo(
inputs = inputs,
env = env_dict,
)
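# For example (hypothetical jars): two direct Java deps providing
# compile jars "a.jar" and "b.jar" yield a JavaInteropInfo with both
# jars as inputs and env = {"CLASSPATH": "a.jar:b.jar"}.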

View file

@ -0,0 +1,26 @@
"""Helper functions on lists."""
load(":private/set.bzl", "set")
def _dedup_on(f, list_):
"""deduplicate `list_` by comparing the result of applying
f to each element (e.g. comparing sub fields)
def compare_x(el):
return el.x
dedup_on([struct(x=3), struct(x=4), struct(x=3)], compare_x)
=> [struct(x=3), struct(x=4)]
"""
seen = set.empty()
deduped = []
for el in list_:
by = f(el)
if not set.is_member(seen, by):
set.mutable_insert(seen, by)
deduped.append(el)
return deduped
list = struct(
dedup_on = _dedup_on,
)

View file

@ -0,0 +1,109 @@
#!/usr/bin/env python
#
# Create a list of exposed modules (including reexported modules)
# given a directory full of interface files and the content of the
# global package database (to mine the versions of all prebuilt
# dependencies). The exposed modules are filtered using a provided
# list of hidden modules, and augmented with reexport declarations.
from __future__ import unicode_literals, print_function
import collections
import fnmatch
import itertools
import os
import re
import sys
import io
if len(sys.argv) != 6:
sys.exit("Usage: %s <DIRECTORY> <GLOBAL_PKG_DB> <HIDDEN_MODS_FILE> <REEXPORTED_MODS_FILE> <RESULT_FILE>" % sys.argv[0])
root = sys.argv[1]
global_pkg_db_dump = sys.argv[2]
hidden_modules_file = sys.argv[3]
reexported_modules_file = sys.argv[4]
results_file = sys.argv[5]
with io.open(global_pkg_db_dump, "r", encoding='utf8') as f:
names = [line.split()[1] for line in f if line.startswith("name:")]
f.seek(0)
ids = [line.split()[1] for line in f if line.startswith("id:")]
# A few sanity checks.
assert len(names) == len(ids)
# compute duplicates, i.e. package names associated with multiple ids
duplicates = set()
if len(names) != len(set(names)):
duplicates = set([
name for name, count in collections.Counter(names).items()
if count > 1
])
# This associates pkg names to pkg ids
pkg_ids_map = dict(zip(names, ids))
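# e.g. (hypothetical entry): pkg_ids_map["base"] == "base-4.12.0.0"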
with io.open(hidden_modules_file, "r", encoding='utf8') as f:
hidden_modules = [mod.strip() for mod in f.read().split(",")]
with io.open(reexported_modules_file, "r", encoding='utf8') as f:
raw_reexported_modules = (
mod.strip() for mod in f.read().split(",") if mod.strip()
)
# Substitute package ids for package names in reexports, because
# GHC really wants package ids.
regexp = re.compile("from (%s):" % "|".join(map(re.escape, pkg_ids_map)))
def replace_pkg_by_pkgid(match):
pkgname = match.group(1)
if pkgname in duplicates:
sys.exit(
"\n".join([
"Multiple versions of the following packages installed: ",
", ".join(duplicates),
"\nThe following was explictly used: " + pkgname,
"\nThis is not currently supported.",
])
)
return "from %s:" % pkg_ids_map[pkgname]
reexported_modules = (
regexp.sub(replace_pkg_by_pkgid, mod)
for mod in raw_reexported_modules
)
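# For example (the id is hypothetical): "D from pkg-d:Original.D"
# becomes "D from pkg-d-0.1.0:Original.D".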
def handle_walk_error(e):
print("""
Failed to list interface files:
{}
On Windows you may need to enable long file path support:
Set-ItemProperty -Path 'HKLM:\SYSTEM\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -Value 1
""".strip().format(e), file=sys.stderr)
exit(1)
interface_files = (
os.path.join(path, f)
for path, dirs, files in os.walk(root, onerror=handle_walk_error)
for f in fnmatch.filter(files, '*.hi')
)
modules = (
# replace directory separators by . to generate module names
# / and \ are respectively the separators for unix (linux / darwin) and windows systems
os.path.splitext(os.path.relpath(f, start=root))[0]
.replace("/",".")
.replace("\\",".")
for f in interface_files
)
exposed_modules = (
m
for m in modules
if m not in hidden_modules
)
with io.open(results_file, "w", encoding='utf8') as f:
f.write(", ".join(itertools.chain(exposed_modules, reexported_modules)))

View file

@ -0,0 +1,12 @@
"""Compilation modes."""
def is_profiling_enabled(hs):
"""Check whether profiling mode is enabled.
Args:
hs: Haskell context.
Returns:
bool: True if the mode is enabled, False otherwise.
"""
return hs.mode == "dbg"

View file

@ -0,0 +1,313 @@
#!/bin/bash
#
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a wrapper script around gcc/clang that adjusts linker flags for
# Haskell library and binary targets.
#
# Load commands that attempt to load dynamic libraries relative to the working
# directory in their package output path (bazel-out/...) are converted to load
# commands relative to @rpath. rules_haskell passes the corresponding
# -Wl,-rpath,... flags itself.
#
# rpath commands that attempt to add rpaths relative to the working directory
# to look for libraries in their package output path (bazel-out/...) are
# omitted, since rules_haskell adds more appropriate rpaths itself.
#
# GHC generates intermediate dynamic libraries outside the build tree.
# Additional RPATH entries are provided for those to make dynamic library
# dependencies in the Bazel build tree available at runtime.
#
# See https://blogs.oracle.com/dipol/entry/dynamic_libraries_rpath_and_mac
# on how to set those paths for Mach-O binaries.
#
set -euo pipefail
INSTALL_NAME_TOOL="/usr/bin/install_name_tool"
OTOOL="/usr/bin/otool"
# Collect arguments to forward in a fresh response file.
RESPONSE_FILE="$(mktemp osx_cc_args_XXXX.rsp)"
rm_response_file() {
rm -f "$RESPONSE_FILE"
}
trap rm_response_file EXIT
add_args() {
# Add the given arguments to the fresh response file. We follow GHC's
# example in storing one argument per line, wrapped in double quotes. Double
# quotes in the argument itself are escaped.
for arg in "$@"; do
printf '"%s"\n' "${arg//\"/\\\"}" >> "$RESPONSE_FILE"
done
}
# Collect library, library dir, and rpath arguments.
LIBS=()
LIB_DIRS=()
RPATHS=()
# Parser state.
# Parsing response file - unquote arguments.
QUOTES=
# Upcoming linker argument.
LINKER=
# Upcoming rpath argument.
RPATH=
# Upcoming install-name argument.
INSTALL=
# Upcoming output argument.
OUTPUT=
parse_arg() {
# Parse the given argument. Decide whether to pass it on to the compiler,
# and how it affects the parser state.
local arg="$1"
# Unquote response file arguments.
if [[ "$QUOTES" = "1" && "$arg" =~ ^\"(.*)\"$ ]]; then
# Take GHC's argument quoting into account when parsing a response
# file. Note, no indication was found that GHC would pass multiline
# arguments, or insert escape codes into the quoted arguments. If you
# observe ill-formed arguments being passed to the compiler, then this
# logic may need to be extended.
arg="${BASH_REMATCH[1]}"
fi
# Parse given argument.
if [[ "$OUTPUT" = "1" ]]; then
# The previous argument was -o. Read output file.
OUTPUT="$arg"
add_args "$arg"
elif [[ "$LINKER" = "1" ]]; then
# The previous argument was -Xlinker. Read linker argument.
if [[ "$RPATH" = "1" ]]; then
# The previous argument was -rpath. Read RPATH.
parse_rpath "$arg"
RPATH=0
elif [[ "$arg" = "-rpath" ]]; then
# rpath is coming
RPATH=1
else
# Unrecognized linker argument. Pass it on.
add_args "-Xlinker" "$arg"
fi
LINKER=
elif [[ "$INSTALL" = "1" ]]; then
INSTALL=
add_args "$arg"
elif [[ "$arg" =~ ^@(.*)$ ]]; then
# Handle response file argument. Parse the arguments contained in the
# response file one by one. Take GHC's argument quoting into account.
# Note, assumes that response file arguments are not nested in other
# response files.
QUOTES=1
while read line; do
parse_arg "$line"
done < "${BASH_REMATCH[1]}"
QUOTES=
elif [[ "$arg" = "-install_name" ]]; then
# Install name is coming. We don't use it, but it can start with an @
# and be mistaken for a response file.
INSTALL=1
add_args "$arg"
elif [[ "$arg" = "-o" ]]; then
# output is coming
OUTPUT=1
add_args "$arg"
elif [[ "$arg" = "-Xlinker" ]]; then
# linker flag is coming
LINKER=1
elif [[ "$arg" =~ ^-l(.*)$ ]]; then
LIBS+=("${BASH_REMATCH[1]}")
add_args "$arg"
elif [[ "$arg" =~ ^-L(.*)$ ]]; then
LIB_DIRS+=("${BASH_REMATCH[1]}")
add_args "$arg"
elif [[ "$arg" =~ ^-Wl,-rpath,(.*)$ ]]; then
parse_rpath "${BASH_REMATCH[1]}"
else
# Unrecognized argument. Pass it on.
add_args "$arg"
fi
}
parse_rpath() {
# Parse the given -rpath argument and decide whether it should be
# forwarded to the compiler/linker.
local rpath="$1"
if [[ "$rpath" =~ ^/ || "$rpath" =~ ^@ ]]; then
# Absolute rpaths, or rpaths relative to @loader_path or similar, are
# passed on to the linker. Other relative rpaths are dropped; these
# are auto-generated by GHC, but are useless because rules_haskell
# constructs dedicated rpaths to the _solib or _hssolib directory.
# See https://github.com/tweag/rules_haskell/issues/689
add_args "-Wl,-rpath,$rpath"
RPATHS+=("$rpath")
fi
}
# Parse all given arguments.
for arg in "$@"; do
parse_arg "$arg"
done
get_library_in() {
# Find the given library in the given directory.
# Returns empty string if the library is not found.
local lib="$1"
local dir="$2"
local solib="${dir}${dir:+/}lib${lib}.so"
local dylib="${dir}${dir:+/}lib${lib}.dylib"
if [[ -f "$solib" ]]; then
echo "$solib"
elif [[ -f "$dylib" ]]; then
echo "$dylib"
fi
}
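# For example, `get_library_in z /usr/lib` prints "/usr/lib/libz.so" or
# "/usr/lib/libz.dylib" if one of them exists, and nothing otherwise.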
get_library_path() {
# Find the given library in the specified library search paths.
# Returns empty string if the library is not found.
if [[ ${#LIB_DIRS[@]} -gt 0 ]]; then
local libpath
for libdir in "${LIB_DIRS[@]}"; do
libpath="$(get_library_in "$1" "$libdir")"
if [[ -n "$libpath" ]]; then
echo "$libpath"
return
fi
done
fi
}
resolve_rpath() {
# Resolve the given rpath. I.e. if it is an absolute path, just return it.
# If it is relative to the output, then prepend the output path.
local rpath="$1"
if [[ "$rpath" =~ ^/ ]]; then
echo "$rpath"
elif [[ "$rpath" =~ ^@loader_path/(.*)$ || "$rpath" =~ ^@executable_path/(.*)$ ]]; then
echo "$(dirname "$OUTPUT")/${BASH_REMATCH[1]}"
else
echo "$rpath"
fi
}
get_library_rpath() {
# Find the given library in the specified rpaths.
# Returns empty string if the library is not found.
if [[ ${#RPATHS[@]} -gt 0 ]]; then
local libdir libpath
for rpath in "${RPATHS[@]}"; do
libdir="$(resolve_rpath "$rpath")"
libpath="$(get_library_in "$1" "$libdir")"
if [[ -n "$libpath" ]]; then
echo "$libpath"
return
fi
done
fi
}
get_library_name() {
# Get the "library name" of the given library.
"$OTOOL" -D "$1" | tail -1
}
relpath() {
# Find the relative path from the first path to the second, assuming the
# first is a directory. If either is an absolute path, then we return the
# absolute path to the second.
local from="$1"
local to="$2"
if [[ "$to" =~ ^/ ]]; then
echo "$to"
elif [[ "$from" =~ ^/ ]]; then
echo "$PWD/$to"
else
# Split path and store components in bash array.
IFS=/ read -a fromarr <<<"$from"
IFS=/ read -a toarr <<<"$to"
# Drop common prefix.
for ((i=0; i < ${#fromarr[@]}; ++i)); do
if [[ "${fromarr[$i]}" != "${toarr[$i]}" ]]; then
break
fi
done
# Construct relative path.
local common=$i
local out=
for ((i=$common; i < ${#fromarr[@]}; ++i)); do
out="$out${out:+/}.."
done
for ((i=$common; i < ${#toarr[@]}; ++i)); do
out="$out${out:+/}${toarr[$i]}"
done
echo $out
fi
}
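# For example, `relpath "a/b" "a/c/libfoo.dylib"` prints
# "../c/libfoo.dylib".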
generate_rpath() {
# Generate an rpath entry for the given library path.
local rpath="$(relpath "$(dirname "$OUTPUT")" "$(dirname "$1")")"
if [[ "$rpath" =~ ^/ ]]; then
echo "$rpath"
else
# Relative rpaths are relative to the binary.
echo "@loader_path${rpath:+/}$rpath"
fi
}
if [[ ! "$OUTPUT" =~ ^bazel-out/ && ${#LIBS[@]} -gt 0 ]]; then
# GHC generates temporary dynamic libraries during compilation outside of
# the build directory. References to dynamic C libraries are broken in this
# case. Here we add additional RPATHs to fix these references. The Hazel
# package for swagger2 is an example that triggers this issue.
for lib in "${LIBS[@]}"; do
librpath="$(get_library_rpath "$lib")"
if [[ -z "$librpath" ]]; then
# The given library was not found in any of the rpaths.
# Find it in the library search paths.
libpath="$(get_library_path "$lib")"
if [[ "$libpath" =~ ^bazel-out/ ]]; then
# The library is Bazel generated and loaded relative to PWD.
# Add an RPATH entry, so it is found at runtime.
rpath="$(generate_rpath "$libpath")"
parse_rpath "$rpath"
fi
fi
done
fi
# Call the C++ compiler with the fresh response file.
%{cc} "@$RESPONSE_FILE"
if [[ ${#LIBS[@]} -gt 0 ]]; then
# Replace load commands relative to the working directory, by load commands
# relative to the rpath, if the library can be found relative to an rpath.
for lib in "${LIBS[@]}"; do
librpath="$(get_library_rpath "$lib")"
if [[ -n "$librpath" ]]; then
libname="$(get_library_name "$librpath")"
if [[ "$libname" =~ ^bazel-out/ ]]; then
"${INSTALL_NAME_TOOL}" -change \
"$libname" \
"@rpath/$(basename "$librpath")" \
"$OUTPUT"
fi
fi
done
fi
# vim: ft=sh

View file

@ -0,0 +1,94 @@
"""Package list handling"""
load(":private/set.bzl", "set")
def pkg_info_to_compile_flags(pkg_info, for_plugin = False):
"""Map package info to GHC command-line arguments.
Args:
pkg_info: Package info collected by `ghc_info()`.
for_plugin: Whether the package is a plugin dependency.
Returns:
The list of command-line arguments that should be passed to GHC.
"""
namespace = "plugin-" if for_plugin else ""
args = [
# In compile.bzl, we pass this just before all -package-id
# arguments. Not doing so leads to bizarre compile-time failures and,
# it turns out, to equally bizarre link-time failures. See
# https://github.com/tweag/rules_haskell/issues/395.
"-hide-all-{}packages".format(namespace),
]
if not pkg_info.has_version:
args.extend([
# Version macros are disabled for all packages by default
# and only enabled for packages with a version,
# see https://github.com/tweag/rules_haskell/issues/414
"-fno-version-macros",
])
for package in pkg_info.packages:
args.extend(["-{}package".format(namespace), package])
for package_id in pkg_info.package_ids:
args.extend(["-{}package-id".format(namespace), package_id])
for package_db in pkg_info.package_dbs:
args.extend(["-package-db", package_db])
return args
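# For example (ids and paths are hypothetical), a pkg_info with
# has_version = False, packages = ["base"],
# package_ids = ["mylib-1.0-abc"], and
# package_dbs = ["path/to/package.conf.d"] maps to:
# ["-hide-all-packages", "-fno-version-macros",
#  "-package", "base", "-package-id", "mylib-1.0-abc",
#  "-package-db", "path/to/package.conf.d"]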
def expose_packages(hs_info, lib_info, use_direct, use_my_pkg_id, custom_package_databases, version):
"""
Returns the information that is needed by GHC in order to enable haskell
packages.
hs_info: is common to all builds
version: if the rule contains a version, we will export the CPP version macro
All the other arguments are not understood well:
lib_info: only used for repl and linter
use_direct: only used for repl and linter
use_my_pkg_id: only used for one specific task in compile.bzl
custom_package_databases: override the package_databases of hs_info, used only by the repl
"""
has_version = version != None and version != ""
# Expose all prebuilt dependencies
#
# We have to remember to specify all (transitive) wired-in
# dependencies or we can't find objects for linking
#
# Set use_direct if hs_info does not have a direct_prebuilt_deps field.
packages = []
for prebuilt_dep in set.to_list(hs_info.direct_prebuilt_deps if use_direct else hs_info.prebuilt_dependencies):
packages.append(prebuilt_dep.package)
# Expose all bazel dependencies
package_ids = []
for package in set.to_list(hs_info.package_ids):
# XXX: repl and lint use this lib_info flag.
# It is set to None in all other usages of this function.
# TODO: find the meaning of this flag
if lib_info == None or package != lib_info.package_id:
# XXX: use_my_pkg_id is not None only in compile.bzl
if (use_my_pkg_id == None) or package != use_my_pkg_id:
package_ids.append(package)
# Only include package DBs for deps, prebuilt deps should be found
# auto-magically by GHC
package_dbs = []
for cache in set.to_list(hs_info.package_databases if not custom_package_databases else custom_package_databases):
package_dbs.append(cache.dirname)
ghc_info = struct(
has_version = has_version,
packages = packages,
package_ids = package_ids,
package_dbs = package_dbs,
)
return ghc_info

View file

@ -0,0 +1,471 @@
"""Utilities for module and path manipulations."""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/set.bzl", "set")
def module_name(hs, f, rel_path = None):
"""Given Haskell source file path, turn it into a dot-separated module name.
module_name(
hs,
"some-workspace/some-package/src/Foo/Bar/Baz.hs",
) => "Foo.Bar.Baz"
Args:
hs: Haskell context.
f: Haskell source file.
rel_path: Explicit relative path from import root to the module, or None
if it should be deduced.
Returns:
string: Haskell module name.
"""
rpath = rel_path
if not rpath:
rpath = _rel_path_to_module(hs, f)
(hsmod, _) = paths.split_extension(rpath.replace("/", "."))
return hsmod
def target_unique_name(hs, name_prefix):
"""Make a target-unique name.
`name_prefix` is made target-unique by adding a rule name
suffix to it. This means that the same `name_prefix` is distinct for
two different rules. Note that this does not disambiguate two
names within the same rule. Given a haskell_library with name foo
you could expect:
target_unique_name(hs, "libdir") => "libdir-foo"
This allows two rules that use the same name_prefix and are built in
the same environment to avoid name clashes in their output files and
directories.
Args:
hs: Haskell context.
name_prefix: Template for the name.
Returns:
string: Target-unique name_prefix.
"""
return "{0}-{1}".format(name_prefix, hs.name)
def module_unique_name(hs, source_file, name_prefix):
"""Make a target- and module- unique name.
module_unique_name(
hs,
"some-workspace/some-package/src/Foo/Bar/Baz.hs",
"libdir"
) => "libdir-foo-Foo.Bar.Baz"
This is quite similar to `target_unique_name` but also uses a path built
from `source_file` to prevent clashes with other names produced using the
same `name_prefix`.
Args:
hs: Haskell context.
source_file: Source file name.
name_prefix: Template for the name.
Returns:
string: Target- and source-unique name.
"""
return "{0}-{1}".format(
target_unique_name(hs, name_prefix),
module_name(hs, source_file),
)
def declare_compiled(hs, src, ext, directory = None, rel_path = None):
"""Given a Haskell-ish source file, declare its output.
Args:
hs: Haskell context.
src: Haskell source file.
ext: New extension.
directory: String, directory prefix the new file should live in.
rel_path: Explicit relative path from import root to the module, or None
if it should be deduced.
Returns:
File: Declared output file living in `directory` with given `ext`.
"""
rpath = rel_path
if not rpath:
rpath = _rel_path_to_module(hs, src)
fp = paths.replace_extension(rpath, ext)
fp_with_dir = fp if directory == None else paths.join(directory, fp)
return hs.actions.declare_file(fp_with_dir)
def make_path(libs, prefix = None, sep = None):
"""Return a string value for using as LD_LIBRARY_PATH or similar.
Args:
libs: List of library files that should be available
prefix: String, an optional prefix to add to every path.
sep: String, the path separator, defaults to ":".
Returns:
String: paths to the given library directories separated by ":".
"""
r = set.empty()
sep = sep if sep else ":"
for lib in libs:
lib_dir = paths.dirname(lib.path)
if prefix:
lib_dir = paths.join(prefix, lib_dir)
set.mutable_insert(r, lib_dir)
return sep.join(set.to_list(r))
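# For example, libraries at "a/libfoo.so" and "b/libbar.so" produce
# "a:b" (or "p/a:p/b" with prefix = "p").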
def darwin_convert_to_dylibs(hs, libs):
"""Convert .so dynamic libraries to .dylib.
Bazel's cc_library rule will create .so files for dynamic libraries even
on MacOS. GHC's builtin linker, which is used during compilation, GHCi,
or doctests, hard-codes the assumption that all dynamic libraries on MacOS
end in .dylib. This function serves as an adaptor and produces a symlink
from a .dylib version to the .so version for every dynamic library
dependency that does not end in .dylib.
Args:
hs: Haskell context.
libs: List of library files dynamic or static.
Returns:
List of library files where all dynamic libraries end in .dylib.
"""
lib_prefix = "_dylibs"
new_libs = []
for lib in libs:
if is_shared_library(lib) and lib.extension != "dylib":
dylib_name = paths.join(
target_unique_name(hs, lib_prefix),
lib.dirname,
"lib" + get_lib_name(lib) + ".dylib",
)
dylib = hs.actions.declare_file(dylib_name)
ln(hs, lib, dylib)
new_libs.append(dylib)
else:
new_libs.append(lib)
return new_libs
def windows_convert_to_dlls(hs, libs):
"""Convert .so dynamic libraries to .dll.
Bazel's cc_library rule will create .so files for dynamic libraries even
on Windows. GHC's builtin linker, which is used during compilation, GHCi,
or doctests, hard-codes the assumption that all dynamic libraries on Windows
end in .dll. This function serves as an adaptor and produces a symlink
from a .dll version to the .so version for every dynamic library
dependency that does not end in .dll.
Args:
hs: Haskell context.
libs: List of library files dynamic or static.
Returns:
List of library files where all dynamic libraries end in .dll.
"""
lib_prefix = "_dlls"
new_libs = []
for lib in libs:
if is_shared_library(lib) and lib.extension != "dll":
dll_name = paths.join(
target_unique_name(hs, lib_prefix),
paths.dirname(lib.short_path),
"lib" + get_lib_name(lib) + ".dll",
)
dll = hs.actions.declare_file(dll_name)
ln(hs, lib, dll)
new_libs.append(dll)
else:
new_libs.append(lib)
return new_libs
def get_lib_name(lib):
"""Return name of library by dropping extension and "lib" prefix.
Args:
lib: The library File.
Returns:
String: name of library.
"""
base = lib.basename[3:] if lib.basename[:3] == "lib" else lib.basename
n = base.find(".so.")
end = paths.replace_extension(base, "") if n == -1 else base[:n]
return end
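# For example, get_lib_name yields "foo" for "libfoo.dylib" and "bar"
# for "libbar.so.1.2" (everything from ".so." onwards is dropped).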
def link_libraries(libs_to_link, args):
"""Add linker flags to link against the given libraries.
Args:
libs_to_link: List of library Files.
args: Append arguments to this list.
Returns:
List of library names that were linked.
"""
seen_libs = set.empty()
libraries = []
for lib in libs_to_link:
lib_name = get_lib_name(lib)
if not set.is_member(seen_libs, lib_name):
set.mutable_insert(seen_libs, lib_name)
args += ["-l{0}".format(lib_name)]
libraries.append(lib_name)
return libraries
def is_shared_library(f):
"""Check if the given File is a shared library.
Args:
f: The File to check.
Returns:
Bool: True if the given file `f` is a shared library, False otherwise.
"""
return f.extension in ["so", "dylib"] or f.basename.find(".so.") != -1
def is_static_library(f):
"""Check if the given File is a static library.
Args:
f: The File to check.
Returns:
Bool: True if the given file `f` is a static library, False otherwise.
"""
return f.extension in ["a"]
def _rel_path_to_module(hs, f):
"""Make given file name relative to the directory where the module hierarchy
starts.
_rel_path_to_module(
"some-workspace/some-package/src/Foo/Bar/Baz.hs"
) => "Foo/Bar/Baz.hs"
Args:
hs: Haskell context.
f: Haskell source file.
Returns:
string: Relative path to module file.
"""
# If it's a generated file, strip off the bin or genfiles prefix.
path = f.path
if path.startswith(hs.bin_dir.path):
path = paths.relativize(path, hs.bin_dir.path)
elif path.startswith(hs.genfiles_dir.path):
path = paths.relativize(path, hs.genfiles_dir.path)
return paths.relativize(path, hs.src_root)
# TODO Consider merging with paths.relativize. See
# https://github.com/bazelbuild/bazel-skylib/pull/44.
def _truly_relativize(target, relative_to):
"""Return a relative path to `target` from `relative_to`.
Args:
target: string, path to directory we want to get relative path to.
relative_to: string, path to directory from which we are starting.
Returns:
string: relative path to `target`.
"""
t_pieces = target.split("/")
r_pieces = relative_to.split("/")
common_part_len = 0
for tp, rp in zip(t_pieces, r_pieces):
if tp == rp:
common_part_len += 1
else:
break
result = [".."] * (len(r_pieces) - common_part_len)
result += t_pieces[common_part_len:]
return "/".join(result)
def ln(hs, target, link, extra_inputs = depset()):
"""Create a symlink to target.
Args:
hs: Haskell context.
target: File, the file to link to.
link: File, the symlink to create.
extra_inputs: extra phony dependencies of symlink.
Returns:
None
"""
relative_target = _truly_relativize(target.path, link.dirname)
hs.actions.run_shell(
inputs = depset([target], transitive = [extra_inputs]),
outputs = [link],
mnemonic = "Symlink",
command = "ln -s {target} {link}".format(
target = relative_target,
link = link.path,
),
use_default_shell_env = True,
)
def link_forest(ctx, srcs, basePath = ".", **kwargs):
"""Write a symlink to each file in `srcs` into a destination directory
defined using the same arguments as `ctx.actions.declare_directory`"""
local_files = []
for src in srcs.to_list():
dest = ctx.actions.declare_file(
paths.join(basePath, src.basename),
**kwargs
)
local_files.append(dest)
ln(ctx, src, dest)
return local_files
def copy_all(ctx, srcs, dest):
"""Copy all the files in `srcs` into `dest`"""
if list(srcs.to_list()) == []:
ctx.actions.run_shell(
command = "mkdir -p {dest}".format(dest = dest.path),
outputs = [dest],
)
else:
args = ctx.actions.args()
args.add_all(srcs)
ctx.actions.run_shell(
inputs = depset(srcs),
outputs = [dest],
mnemonic = "Copy",
command = "mkdir -p {dest} && cp -L -R \"$@\" {dest}".format(dest = dest.path),
arguments = [args],
)
def parse_pattern(ctx, pattern_str):
"""Parses a string label pattern.
Args:
ctx: Standard Bazel Rule context.
pattern_str: The pattern to parse.
Patterns are absolute labels in the local workspace. E.g.
`//some/package:some_target`. The following wild-cards are allowed:
`...`, `:all`, and `:*`. Also the `//some/package` shortcut is allowed.
Returns:
A struct of
package: A list of package path components. May end on the wildcard `...`.
target: The target name. None if the package ends on `...`. May be one
of the wildcards `all` or `*`.
NOTE: it would be better if Bazel itself exposed this functionality to Starlark.
Any feature using this function should be marked as experimental, until the
resolution of https://github.com/bazelbuild/bazel/issues/7763.
"""
# We only load targets in the local workspace anyway. So, it's never
# necessary to specify a workspace. Therefore, we don't allow it.
if pattern_str.startswith("@"):
fail("Invalid haskell_repl pattern. Patterns may not specify a workspace. They only apply to the current workspace")
# To keep things simple, all patterns have to be absolute.
if not pattern_str.startswith("//"):
if not pattern_str.startswith(":"):
fail("Invalid haskell_repl pattern. Patterns must start with either '//' or ':'.")
# if the pattern string doesn't start with a package (it starts with :, e.g. :two),
# then we prepend the contextual package
pattern_str = "//{package}{target}".format(package = ctx.label.package, target = pattern_str)
# Separate package and target (if present).
package_target = pattern_str[2:].split(":", maxsplit = 2)
package_str = package_target[0]
target_str = None
if len(package_target) == 2:
target_str = package_target[1]
# Parse package pattern.
package = []
dotdotdot = False # ... has to be last component in the pattern.
for s in package_str.split("/"):
if dotdotdot:
fail("Invalid haskell_repl pattern. ... has to appear at the end.")
if s == "...":
dotdotdot = True
package.append(s)
# Parse target pattern.
if dotdotdot:
if target_str != None:
fail("Invalid haskell_repl pattern. ... has to appear at the end.")
elif target_str == None:
if len(package) > 0 and package[-1] != "":
target_str = package[-1]
else:
fail("Invalid haskell_repl pattern. The empty string is not a valid target.")
return struct(
package = package,
target = target_str,
)
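# For example:
# parse_pattern(ctx, "//foo/bar:all")
#     => struct(package = ["foo", "bar"], target = "all")
# parse_pattern(ctx, "//foo/...")
#     => struct(package = ["foo", "..."], target = None)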
def match_label(patterns, label):
"""Whether the given local workspace label matches any of the patterns.
Args:
patterns: A list of parsed patterns to match the label against.
Apply `parse_pattern` before passing patterns into this function.
label: Match this label against the patterns.
Returns:
A boolean. True if the label is in the local workspace and matches any of
the given patterns. False otherwise.
NOTE: it would be better if Bazel itself exposed this functionality to Starlark.
Any feature using this function should be marked as experimental, until the
resolution of https://github.com/bazelbuild/bazel/issues/7763.
"""
# Only local workspace labels can match.
# Despite the docs saying otherwise, labels don't have a workspace_name
# attribute. So, we use the workspace_root. If it's empty, the target is in
# the local workspace. Otherwise, it's an external target.
if label.workspace_root != "":
return False
package = label.package.split("/")
target = label.name
    # Match the label against each pattern in turn; any single match suffices.
    for pattern in patterns:
        if _match_single_pattern(pattern, package, target):
            return True
    return False

def _match_single_pattern(pattern, package, target):
    """Whether a package path and target name match one parsed pattern."""

    # Match package components.
    for i in range(min(len(pattern.package), len(package))):
        if pattern.package[i] == "...":
            return True
        elif pattern.package[i] != package[i]:
            return False

    # If no wild-card or mismatch was encountered, the lengths must match.
    # Otherwise, the label's package is not covered.
    if len(pattern.package) != len(package):
        return False

    # Match target.
    if pattern.target == "all" or pattern.target == "*":
        return True
    return pattern.target == target
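# For illustration (hypothetical labels):
#   p = parse_pattern(ctx, "//foo/...")
#   match_label([p], Label("//foo/bar:baz"))  # True
#   match_label([p], Label("//other:baz"))    # False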

View file

@@ -0,0 +1,67 @@
"""Package identifiers"""
load(":private/mode.bzl", "is_profiling_enabled")
load("@bazel_skylib//lib:paths.bzl", "paths")
def _zencode(s):
"""Z-escape special characters to make a valid GHC package identifier.
Args:
s: string
"""
return s.replace("Z", "ZZ").replace("_", "ZU").replace("/", "ZS")
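# For example, _zencode("foo_bar/baz") == "fooZUbarZSbaz". Doubling any
# literal "Z" first keeps the encoding unambiguous.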
def _to_string(my_pkg_id):
"""Get a globally unique package identifier.
The identifier is required to be unique for each Haskell rule.
It includes the Bazel package and the name of this component.
We can't use just the latter because then two components with
the same names in different packages would clash.
"""
return _zencode(
paths.join(
my_pkg_id.label.workspace_root,
my_pkg_id.label.package,
my_pkg_id.name,
),
)
def _new(label, version = None):
"""Create a new package identifier.
Package identifiers should be globally unique. This is why we use
a label to identify them.
Args:
label: The label of the rule declaring the package.
version: an optional version annotation.
Returns:
string: GHC package ID to use.
"""
return struct(
label = label,
name = label.name.replace("_", "-"),
version = version,
)
def _library_name(hs, my_pkg_id, prof_suffix = False):
"""Get library name.
Args:
hs: Haskell context.
my_pkg_id: pkg_id struct.
prof_suffix: whether to automatically add profiling suffix.
"""
library_name = "HS" + _to_string(my_pkg_id)
if is_profiling_enabled(hs) and prof_suffix:
library_name += "_p"
return library_name
pkg_id = struct(
new = _new,
to_string = _to_string,
library_name = _library_name,
)
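# For illustration (hypothetical label): for a rule //foo/bar:my_lib in the
# main workspace, pkg_id.new(label) yields the name "my-lib",
# pkg_id.to_string(...) yields "fooZSbarZSmy-lib", and
# pkg_id.library_name(...) yields "HSfooZSbarZSmy-lib" (with an extra "_p"
# suffix when profiling is enabled and prof_suffix is set).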

View file

@@ -0,0 +1,150 @@
"""Immutable sets that support efficient merging, traversal, and membership
check.
"""
def _empty():
"""Create an empty set.
Returns:
set, new empty set.
"""
return struct(_set_items = dict())
def _singleton(e):
"""Create a set with single element `e` inside.
Args:
e: The element to put in the set.
Returns:
set, new set.
"""
r = dict()
r[e] = None
return struct(_set_items = r)
def _is_member(s, e):
"""Return true if `e` is in the set `s`.
Args:
s: The set to inspect.
e: The element to search for.
Result:
Bool, true if `e` is in `s`, false otherwise.
"""
return e in s._set_items
def _insert(s, e):
"""Insert an element into the set.
Args:
s: Set to insert new element into.
e: The element to insert.
Result:
      A copy of set `s` with element `e` added.
"""
r = dict(s._set_items)
r[e] = None
return struct(_set_items = r)
def _mutable_insert(s, e):
"""The same as `set.insert`, but modifies the first argument in place.
Args:
s: Set to insert new element into.
e: The element to insert.
Result:
      set `s` with element `e` added.
"""
s._set_items[e] = None
return s
def _union(s0, s1):
"""Return union of two sets.
Args:
s0: One set.
s1: Another set.
Result:
set, union of the two sets.
"""
r = dict(s0._set_items)
r.update(s1._set_items)
return struct(_set_items = r)
def _mutable_union(s0, s1):
"""Modify set `s0` adding elements from `s1` to it.
Args:
s0: One set.
s1: Another set.
Result:
set, union of the two sets.
"""
s0._set_items.update(s1._set_items)
return s0
def _map(s, f):
"""Map elements of given set using a function.
Args:
s: Original set.
f: Function to apply to elements of the set.
Result:
set with elements obtained by application of function `f` to the
elements of `s`.
"""
return struct(_set_items = {f(x): None for x in s._set_items.keys()})
def _from_list(l):
"""Create a set containing elements from given list.
Args:
l: List, source of the elements for the new set.
Result:
set containing elements from given list.
"""
    return struct(_set_items = {x: None for x in l})
def _to_list(s):
"""Convert set into a list of its elements.
Args:
s: Set to convert.
Returns:
List of elements of the set.
"""
return s._set_items.keys()
def _to_depset(s):
"""Similar to `set.to_list`, but produces a depset.
Args:
s: Set to convert.
Returns:
Depset of elements from the set.
"""
return depset(_to_list(s))
set = struct(
empty = _empty,
singleton = _singleton,
is_member = _is_member,
insert = _insert,
mutable_insert = _mutable_insert,
union = _union,
mutable_union = _mutable_union,
map = _map,
from_list = _from_list,
to_list = _to_list,
to_depset = _to_depset,
)
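# Example usage (for illustration):
#   s = set.from_list(["a", "b"])
#   s = set.insert(s, "c")                # non-destructive: returns a new set
#   set.is_member(s, "c")                 # True
#   t = set.union(s, set.singleton("d"))
#   sorted(set.to_list(t))                # ["a", "b", "c", "d"]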

View file

@@ -0,0 +1,47 @@
load(":private/set.bzl", "set")
def generate_version_macros(ctx, name, version):
"""Generate a version macros header file.
Args:
ctx: Rule context. Needs to define a _version_macros executable attribute.
name: The package name.
version: The package version.
Returns:
Version macros header File.
"""
version_macros_file = ctx.actions.declare_file("{}_version_macros.h".format(name))
ctx.actions.run_shell(
inputs = [ctx.executable._version_macros],
outputs = [version_macros_file],
command = """
"$1" "$2" "$3" > "$4"
""",
arguments = [
ctx.executable._version_macros.path,
name,
version,
version_macros_file.path,
],
)
return version_macros_file
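# For illustration (hypothetical values): generate_version_macros(ctx, "mylib",
# "0.1.0") declares "mylib_version_macros.h" and fills it by running the
# _version_macros script with the name and version as arguments.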
def version_macro_includes(hs_info):
"""Generate a list of version macro header includes.
Args:
hs_info: HaskellInfo provider.
Returns:
(files, flags):
files: Set of version macros header files.
flags: List of C preprocessor flags to include version macros.
"""
files = hs_info.version_macros
flags = [
f
for include in set.to_list(files)
for f in ["-include", include.path]
]
return (files, flags)
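# For illustration (hypothetical path): if `hs_info.version_macros` holds a
# single header "mylib_version_macros.h", the returned flags are
# ["-include", "mylib_version_macros.h"].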

View file

@@ -0,0 +1,101 @@
#!/usr/bin/env python3
"""Generate Cabal version macros.
Generates the content of a C header file for the given library name and version
and prints it to standard output.
"""
import argparse
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("name", help="The package name.")
parser.add_argument("version", help="The package version.")
args = parser.parse_args()
print(version_macros(args.name, args.version))
def version_macros(name, version):
"""Generate Cabal version macros.
Based on Cabal's version macro generation, see [1].
[1]: http://hackage.haskell.org/package/Cabal-2.4.1.0/docs/src/Distribution.Simple.Build.Macros.html#generatePackageVersionMacros
"""
(major1, major2, minor) = version_components(version)
escaped_name = cpp_escape_name(name)
return "\n".join([
# #define VERSION_pkg "1.2.3"
cpp_ifndef_define(
"VERSION_" + escaped_name,
[],
'"{}"'.format(version),
),
# #define MIN_VERSION_pkg(major1, major2, minor) ...
cpp_ifndef_define(
"MIN_VERSION_" + escaped_name,
["major1", "major2", "minor"],
" \\\n".join([
"(",
" (major1) < {} ||".format(major1),
" (major1) == {} && (major2) < {} ||".format(major1, major2),
" (major1) == {} && (major2) == {} && (minor) <= {} )".format(
major1, major2, minor),
])),
])
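# For illustration, version_macros("my-pkg", "1.2.3") emits (roughly):
#
#   #ifndef VERSION_my_pkg
#   #define VERSION_my_pkg "1.2.3"
#   #endif /* VERSION_my_pkg */
#   #ifndef MIN_VERSION_my_pkg
#   #define MIN_VERSION_my_pkg(major1,major2,minor) ...
#   #endif /* MIN_VERSION_my_pkg */
#
# where MIN_VERSION_my_pkg(x, y, z) tests whether (x, y, z) <= (1, 2, 3)
# lexicographically.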
def version_components(version):
"""Split version string into major1.major2.minor components."""
components = version.split(".")
num = len(components)
if num < 1:
raise ValueError("version should have at least one component.")
major1 = components[0]
if num >= 2:
major2 = components[1]
else:
major2 = "0"
if num >= 3:
minor = components[2]
else:
minor = "0"
return (major1, major2, minor)
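# For example: version_components("1") == ("1", "0", "0") and
# version_components("1.2.3.4") == ("1", "2", "3") (components beyond the
# third are ignored).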
def cpp_escape_name(name):
"""Escape package name to be CPP macro safe."""
return name.replace("-", "_")
def cpp_define(macro, params, val):
"""CPP macro definition, optionally with parameters."""
return "#define {macro}{params} {val}".format(
macro = macro,
params = "({})".format(",".join(params)) if params else "",
val = val,
)
def cpp_ifndef(macro, body):
"""CPP ifndef block."""
return "#ifndef {macro}\n{body}\n#endif /* {macro} */".format(
macro = macro,
body = body,
)
def cpp_ifndef_define(macro, params, val):
"""CPP macro definition, if not previously defined."""
return cpp_ifndef(macro, cpp_define(macro, params, val))
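# For example, cpp_ifndef_define("FOO", [], "1") returns:
#   #ifndef FOO
#   #define FOO 1
#   #endif /* FOO */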
if __name__ == "__main__":
main()