feat(third_party/bazel): Check in rules_haskell from Tweag

Vincent Ambo 2019-07-04 11:18:12 +01:00
parent 2eb1dc26e4
commit f723b8b878
479 changed files with 51484 additions and 0 deletions


@@ -0,0 +1,563 @@
"""Actions for compiling Haskell source code"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"declare_compiled",
"module_name",
"target_unique_name",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/version_macros.bzl", "version_macro_includes")
load(
":providers.bzl",
"GhcPluginInfo",
"get_libs_for_ghc_linker",
"merge_HaskellCcInfo",
)
load(":private/set.bzl", "set")
def _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, hsc_file):
"""Process a single hsc file.
Args:
hs: Haskell context.
cc: CcInteropInfo, information about C dependencies.
hsc_flags: extra flags to pass to hsc2hs
hsc_inputs: extra file inputs for the hsc2hs command
hsc_file: hsc file to process.
Returns:
(File, string): Haskell source file created by processing hsc_file and
new import directory containing the produced file.
"""
args = hs.actions.args()
# Output a Haskell source file.
hsc_dir_raw = paths.join("_hsc", hs.name)
hs_out = declare_compiled(hs, hsc_file, ".hs", directory = hsc_dir_raw)
args.add_all([hsc_file.path, "-o", hs_out.path])
args.add_all(["-c", cc.tools.cc])
args.add_all(["-l", cc.tools.cc])
args.add("-ighcplatform.h")
args.add("-ighcversion.h")
args.add_all(["--cflag=" + f for f in cc.cpp_flags])
args.add_all(["--cflag=" + f for f in cc.compiler_flags])
args.add_all(["--cflag=" + f for f in cc.include_args])
args.add_all(["--lflag=" + f for f in cc.linker_flags])
args.add_all(hsc_flags)
# Add an empty PATH variable if not already specified in hs.env.
# Needed to avoid a "Couldn't read PATH" error on Windows.
#
    # On Unix platforms, though, we mustn't set PATH, as it is automatically
    # set up by the run action unless already present in the env parameter.
    # Setting it anyway triggers build errors when using GHC bindists on Linux.
if hs.env.get("PATH") == None and hs.toolchain.is_windows:
hs.env["PATH"] = ""
hs.actions.run(
inputs = depset(transitive = [
depset(cc.hdrs),
depset([hsc_file]),
depset(cc.files),
depset(hsc_inputs),
]),
outputs = [hs_out],
mnemonic = "HaskellHsc2hs",
executable = hs.tools.hsc2hs,
arguments = [args],
env = hs.env,
)
idir = paths.join(
hs.bin_dir.path,
hs.label.package,
hsc_dir_raw,
)
return hs_out, idir
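# For illustration (hypothetical target): for a target `mylib` in package
# `haskell/app` with a source `Foo.hsc`, the action above produces roughly
#
#     hs_out: bazel-bin/haskell/app/_hsc/mylib/Foo.hs
#     idir:   bazel-bin/haskell/app/_hsc/mylib
#
# `idir` is later added to the import hierarchy roots so GHC can locate the
# generated module.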
def _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id, version, plugins):
"""Compute variables common to all compilation targets (binary and library).
Returns:
struct with the following fields:
args: default argument list
compile_flags: arguments that were used to compile the package
inputs: default inputs
input_manifests: input manifests
outputs: default outputs
objects_dir: object files directory
interfaces_dir: interface files directory
source_files: set of files that contain Haskell modules
extra_source_files: depset of non-Haskell source files
        import_dirs: import hierarchy roots (e.g. for hsc2hs output)
env: default environment variables
"""
compile_flags = []
# GHC expects the CC compiler as the assembler, but segregates the
# set of flags to pass to it when used as an assembler. So we have
# to set both -optc and -opta.
cc_args = [
"-optc" + f
for f in cc.compiler_flags
] + [
"-opta" + f
for f in cc.compiler_flags
]
compile_flags += cc_args
interface_dir_raw = "_iface_prof" if with_profiling else "_iface"
object_dir_raw = "_obj_prof" if with_profiling else "_obj"
# Declare file directories.
#
# NOTE: We could have used -outputdir here and a single output
# directory. But keeping interface and object files separate has
# one advantage: if interface files are invariant under
# a particular code change, then we don't need to rebuild
# downstream.
if my_pkg_id:
# If we're compiling a package, put the interfaces inside the
# package directory.
interfaces_dir = hs.actions.declare_directory(
paths.join(
pkg_id.to_string(my_pkg_id),
interface_dir_raw,
),
)
else:
interfaces_dir = hs.actions.declare_directory(
paths.join(interface_dir_raw, hs.name),
)
objects_dir = hs.actions.declare_directory(
paths.join(object_dir_raw, hs.name),
)
# Default compiler flags.
compile_flags += hs.toolchain.compiler_flags
compile_flags += user_compile_flags
# Work around macOS linker limits. This fix has landed in GHC HEAD, but is
# not yet in a release; plus, we still want to support older versions of
# GHC. For details, see: https://phabricator.haskell.org/D4714
if hs.toolchain.is_darwin:
compile_flags += ["-optl-Wl,-dead_strip_dylibs"]
compile_flags.extend(
pkg_info_to_compile_flags(
expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = my_pkg_id,
custom_package_databases = None,
version = version,
),
),
)
compile_flags.extend(
pkg_info_to_compile_flags(
expose_packages(
plugin_dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = my_pkg_id,
custom_package_databases = None,
version = version,
),
for_plugin = True,
),
)
header_files = []
boot_files = []
source_files = set.empty()
# Forward all "-D" and "-optP-D" flags to hsc2hs
hsc_flags = []
hsc_flags += ["--cflag=" + x for x in user_compile_flags if x.startswith("-D")]
hsc_flags += ["--cflag=" + x[len("-optP"):] for x in user_compile_flags if x.startswith("-optP-D")]
hsc_inputs = []
if version:
(version_macro_headers, version_macro_flags) = version_macro_includes(dep_info)
hsc_flags += ["--cflag=" + x for x in version_macro_flags]
hsc_inputs += set.to_list(version_macro_headers)
# Add import hierarchy root.
# Note that this is not perfect, since GHC requires hs-boot files
# to be in the same directory as the corresponding .hs file. Thus
# the two must both have the same root; i.e., both plain files,
# both in bin_dir, or both in genfiles_dir.
import_dirs = set.from_list([
hs.src_root,
paths.join(hs.bin_dir.path, hs.src_root),
paths.join(hs.genfiles_dir.path, hs.src_root),
])
for s in srcs:
if s.extension == "h":
header_files.append(s)
elif s.extension == "hsc":
s0, idir = _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, s)
set.mutable_insert(source_files, s0)
set.mutable_insert(import_dirs, idir)
elif s.extension in ["hs-boot", "lhs-boot"]:
boot_files.append(s)
else:
set.mutable_insert(source_files, s)
if s in import_dir_map:
idir = import_dir_map[s]
set.mutable_insert(import_dirs, idir)
compile_flags += ["-i{0}".format(d) for d in set.to_list(import_dirs)]
# Write the -optP flags to a parameter file because they can be very long on Windows
# e.g. 27Kb for grpc-haskell
# Equivalent to: compile_flags += ["-optP" + f for f in cc.cpp_flags]
optp_args_file = hs.actions.declare_file("optp_args_%s" % hs.name)
optp_args = hs.actions.args()
optp_args.add_all(cc.cpp_flags)
optp_args.set_param_file_format("multiline")
hs.actions.write(optp_args_file, optp_args)
compile_flags += ["-optP@" + optp_args_file.path]
compile_flags += cc.include_args
locale_archive_depset = (
depset([hs.toolchain.locale_archive]) if hs.toolchain.locale_archive != None else depset()
)
    # This is absolutely required; otherwise GHC doesn't know what package
    # it's creating `Name`s for and cannot put them into the Haddock
    # interface files, which results in Haddock failing to resolve names
    # for linking in the environment after reading its interface file later.
if my_pkg_id != None:
unit_id_args = [
"-this-unit-id",
pkg_id.to_string(my_pkg_id),
"-optP-DCURRENT_PACKAGE_KEY=\"{}\"".format(pkg_id.to_string(my_pkg_id)),
]
compile_flags += unit_id_args
args = hs.actions.args()
# Compilation mode. Allow rule-supplied compiler flags to override it.
if hs.mode == "opt":
args.add("-O2")
args.add("-static")
if with_profiling:
args.add("-prof", "-fexternal-interpreter")
# Common flags
args.add_all([
"-v0",
"-no-link",
"-fPIC",
"-hide-all-packages",
# Should never trigger in sandboxed builds, but can be useful
# to debug issues in non-sandboxed builds.
"-Wmissing-home-modules",
])
# Output directories
args.add_all([
"-odir",
objects_dir.path,
"-hidir",
interfaces_dir.path,
])
# Interface files with profiling have to have the extension "p_hi":
# https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/packages.html#installedpackageinfo-a-package-specification
# otherwise we won't be able to register them with ghc-pkg.
if with_profiling:
args.add_all([
"-hisuf",
"p_hi",
"-osuf",
"p_o",
])
args.add_all(compile_flags)
# Plugins
for plugin in plugins:
args.add("-fplugin={}".format(plugin[GhcPluginInfo].module))
for opt in plugin[GhcPluginInfo].args:
args.add_all(["-fplugin-opt", "{}:{}".format(plugin[GhcPluginInfo].module, opt)])
plugin_tool_inputs = [plugin[GhcPluginInfo].tool_inputs for plugin in plugins]
plugin_tool_input_manifests = [
manifest
for plugin in plugins
for manifest in plugin[GhcPluginInfo].tool_input_manifests
]
# Pass source files
for f in set.to_list(source_files):
args.add(f)
extra_source_files = depset(
transitive = [extra_srcs, depset(header_files), depset(boot_files)],
)
# Transitive library dependencies for runtime.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
merge_HaskellCcInfo(
dep_info.transitive_cc_dependencies,
plugin_dep_info.transitive_cc_dependencies,
),
)
return struct(
args = args,
compile_flags = compile_flags,
inputs = depset(transitive = [
depset(header_files),
depset(boot_files),
set.to_depset(source_files),
extra_source_files,
depset(cc.hdrs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.interface_dirs),
depset(dep_info.static_libraries),
depset(dep_info.static_libraries_prof),
set.to_depset(dep_info.dynamic_libraries),
set.to_depset(plugin_dep_info.package_databases),
set.to_depset(plugin_dep_info.interface_dirs),
depset(plugin_dep_info.static_libraries),
depset(plugin_dep_info.static_libraries_prof),
set.to_depset(plugin_dep_info.dynamic_libraries),
depset(library_deps),
depset(ld_library_deps),
java.inputs,
locale_archive_depset,
depset(transitive = plugin_tool_inputs),
depset([optp_args_file]),
]),
input_manifests = plugin_tool_input_manifests,
objects_dir = objects_dir,
interfaces_dir = interfaces_dir,
outputs = [objects_dir, interfaces_dir],
source_files = source_files,
extra_source_files = depset(transitive = [extra_source_files, depset([optp_args_file])]),
import_dirs = import_dirs,
env = dicts.add(
ghc_env,
java.env,
hs.env,
),
)
def _hpc_compiler_args(hs):
hpcdir = "{}/{}/.hpc".format(hs.bin_dir.path, hs.package_root)
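    # e.g. "bazel-out/k8-fastbuild/bin/haskell/app/.hpc" (hypothetical paths)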
return ["-fhpc", "-hpcdir", hpcdir]
def _coverage_datum(mix_file, src_file, target_label):
return struct(
mix_file = mix_file,
src_file = src_file,
target_label = target_label,
)
def compile_binary(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs,
ls_modules,
import_dir_map,
extra_srcs,
user_compile_flags,
dynamic,
with_profiling,
main_function,
version,
inspect_coverage = False,
plugins = []):
"""Compile a Haskell target into object files suitable for linking.
Returns:
struct with the following fields:
object_files: list of static object files
object_dyn_files: list of dynamic object files
modules: set of module names
source_files: set of Haskell source files
"""
c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = None, version = version, plugins = plugins)
c.args.add_all(["-main-is", main_function])
if dynamic:
# For binaries, GHC creates .o files even for code to be
# linked dynamically. So we have to force the object suffix to
# be consistent with the dynamic object suffix in the library
# case.
c.args.add_all(["-dynamic", "-osuf dyn_o"])
coverage_data = []
if inspect_coverage:
c.args.add_all(_hpc_compiler_args(hs))
for src_file in srcs:
module = module_name(hs, src_file)
mix_file = hs.actions.declare_file(".hpc/{module}.mix".format(module = module))
coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = c.inputs,
input_manifests = c.input_manifests,
outputs = c.outputs + [datum.mix_file for datum in coverage_data],
mnemonic = "HaskellBuildBinary" + ("Prof" if with_profiling else ""),
progress_message = "HaskellBuildBinary {}".format(hs.label),
env = c.env,
arguments = c.args,
)
if with_profiling:
exposed_modules_file = None
else:
exposed_modules_file = hs.actions.declare_file(
target_unique_name(hs, "exposed-modules"),
)
hs.actions.run(
inputs = [c.interfaces_dir, hs.toolchain.global_pkg_db],
outputs = [exposed_modules_file],
executable = ls_modules,
arguments = [
c.interfaces_dir.path,
hs.toolchain.global_pkg_db.path,
"/dev/null", # no hidden modules
"/dev/null", # no reexported modules
exposed_modules_file.path,
],
use_default_shell_env = True,
)
return struct(
objects_dir = c.objects_dir,
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
compile_flags = c.compile_flags,
exposed_modules_file = exposed_modules_file,
coverage_data = coverage_data,
)
def compile_library(
hs,
cc,
java,
dep_info,
plugin_dep_info,
srcs,
ls_modules,
other_modules,
exposed_modules_reexports,
import_dir_map,
extra_srcs,
user_compile_flags,
with_shared,
with_profiling,
my_pkg_id,
plugins = []):
"""Build arguments for Haskell package build.
Returns:
struct with the following fields:
interfaces_dir: directory containing interface files
interface_files: list of interface files
object_files: list of static object files
object_dyn_files: list of dynamic object files
compile_flags: list of string arguments suitable for Haddock
modules: set of module names
source_files: set of Haskell module files
import_dirs: import directories that should make all modules visible (for GHCi)
"""
c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = my_pkg_id, version = my_pkg_id.version, plugins = plugins)
if with_shared:
c.args.add("-dynamic-too")
coverage_data = []
if hs.coverage_enabled:
c.args.add_all(_hpc_compiler_args(hs))
for src_file in srcs:
pkg_id_string = pkg_id.to_string(my_pkg_id)
module = module_name(hs, src_file)
mix_file = hs.actions.declare_file(".hpc/{pkg}/{module}.mix".format(pkg = pkg_id_string, module = module))
coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = c.inputs,
input_manifests = c.input_manifests,
outputs = c.outputs + [datum.mix_file for datum in coverage_data],
mnemonic = "HaskellBuildLibrary" + ("Prof" if with_profiling else ""),
progress_message = "HaskellBuildLibrary {}".format(hs.label),
env = c.env,
arguments = c.args,
)
if with_profiling:
exposed_modules_file = None
else:
hidden_modules_file = hs.actions.declare_file(
target_unique_name(hs, "hidden-modules"),
)
hs.actions.write(
output = hidden_modules_file,
content = ", ".join(other_modules),
)
reexported_modules_file = hs.actions.declare_file(
target_unique_name(hs, "reexported-modules"),
)
hs.actions.write(
output = reexported_modules_file,
content = ", ".join(exposed_modules_reexports),
)
exposed_modules_file = hs.actions.declare_file(
target_unique_name(hs, "exposed-modules"),
)
hs.actions.run(
inputs = [
c.interfaces_dir,
hs.toolchain.global_pkg_db,
hidden_modules_file,
reexported_modules_file,
],
outputs = [exposed_modules_file],
executable = ls_modules,
arguments = [
c.interfaces_dir.path,
hs.toolchain.global_pkg_db.path,
hidden_modules_file.path,
reexported_modules_file.path,
exposed_modules_file.path,
],
use_default_shell_env = True,
)
return struct(
interfaces_dir = c.interfaces_dir,
objects_dir = c.objects_dir,
compile_flags = c.compile_flags,
source_files = c.source_files,
extra_source_files = c.extra_source_files,
import_dirs = c.import_dirs,
exposed_modules_file = exposed_modules_file,
coverage_data = coverage_data,
)


@@ -0,0 +1,667 @@
"""Actions for linking object code produced by compilation"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"is_static_library",
"ln",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/list.bzl", "list")
# tests in /tests/unit_tests/BUILD
def parent_dir_path(path):
"""Returns the path of the parent directory.
For a relative path with just a file, "." is returned.
The path is not normalized.
foo => .
foo/ => foo
foo/bar => foo
foo/bar/baz => foo/bar
foo/../bar => foo/..
    Args:
      path: a path string
Returns:
A path list of the form `["foo", "bar"]`
"""
path_dir = paths.dirname(path)
# dirname returns "" if there is no parent directory
# In that case we return the identity path, which is ".".
if path_dir == "":
return ["."]
else:
return path_dir.split("/")
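# For example:
#
#     parent_dir_path("foo/bar/baz") == ["foo", "bar"]
#     parent_dir_path("foo") == ["."]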
def __check_dots(target, path):
    # there's still a (non-leading) `..` in the split path
if ".." in path:
fail("the short_path of target {} (which is {}) contains more dots than loading `../`. We cant handle that.".format(
target,
target.short_path,
))
# Skylark doesn't allow nested defs, which is a mystery.
def _get_target_parent_dir(target):
"""get the parent dir and handle leading short_path dots,
which signify that the target is in an external repository.
Args:
target: a target, .short_path is used
Returns:
(is_external, parent_dir)
`is_external`: Bool whether the path points to an external repository
      `parent_dir`: The parent directory, up to either the runfiles toplevel
        or the external repository toplevel.
        Is `[]` if there is no parent dir.
"""
parent_dir = parent_dir_path(target.short_path)
if parent_dir[0] == "..":
__check_dots(target, parent_dir[1:])
return (True, parent_dir[1:])
elif parent_dir[0] == ".":
return (False, [])
else:
__check_dots(target, parent_dir)
return (False, parent_dir)
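# For illustration (hypothetical short_paths):
#
#     "foo/bar/baz.so"   => (False, ["foo", "bar"])
#     "../repo/bar/b.so" => (True, ["repo", "bar"])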
# tests in /tests/unit_tests/BUILD
def create_rpath_entry(
binary,
dependency,
keep_filename,
prefix = ""):
"""Return a (relative) path that points from `binary` to `dependecy`
while not leaving the current bazel runpath, taking into account weird
corner cases of `.short_path` concerning external repositories.
    The resulting entry is suitable for insertion into an rpath or similar.
Examples:
bin.short_path=foo/a.so and dep.short_path=bar/b.so
=> create_rpath_entry(bin, dep, False) = ../bar
and
create_rpath_entry(bin, dep, True) = ../bar/b.so
and
create_rpath_entry(bin, dep, True, "$ORIGIN") = $ORIGIN/../bar/b.so
Args:
binary: target of current binary
dependency: target of dependency to relatively point to
keep_filename: whether to point to the filename or its parent dir
prefix: string path prefix to add before the relative path
Returns:
relative path string
"""
(bin_is_external, bin_parent_dir) = _get_target_parent_dir(binary)
(dep_is_external, dep_parent_dir) = _get_target_parent_dir(dependency)
# backup through parent directories of the binary,
# to the runfiles directory
bin_backup = [".."] * len(bin_parent_dir)
# external repositories live in `target.runfiles/external`,
# while the internal repository lives in `target.runfiles`.
# The `.short_path`s of external repositories are strange,
# they start with `../`, but you cannot just append that in
# order to find the correct runpath. Instead you have to use
# the following logic to construct the correct runpaths:
if bin_is_external:
if dep_is_external:
# stay in `external`
path_segments = bin_backup
else:
# backup out of `external`
path_segments = [".."] + bin_backup
elif dep_is_external:
# go into `external`
path_segments = bin_backup + ["external"]
else:
# no special external traversal
path_segments = bin_backup
# then add the parent dir to our dependency
path_segments.extend(dep_parent_dir)
# optionally add the filename
if keep_filename:
path_segments.append(
paths.basename(dependency.short_path),
)
# normalize for good measure and create the final path
path = paths.normalize("/".join(path_segments))
# and add the prefix if applicable
if prefix == "":
return path
else:
return prefix + "/" + path
def _merge_parameter_files(hs, file1, file2):
"""Merge two GHC parameter files into one.
Args:
hs: Haskell context.
file1: The first parameter file.
file2: The second parameter file.
Returns:
File: A new parameter file containing the parameters of both input files.
The file name is based on the file names of the input files. The file
is located next to the first input file.
"""
params_file = hs.actions.declare_file(
file1.basename + ".and." + file2.basename,
sibling = file1,
)
hs.actions.run_shell(
inputs = [file1, file2],
outputs = [params_file],
command = """
cat {file1} {file2} > {out}
""".format(
file1 = file1.path,
file2 = file2.path,
out = params_file.path,
),
)
return params_file
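# For example (hypothetical file names), merging `foo.static.manifest` and
# `foo.static.linker_flags` yields a sibling file
# `foo.static.manifest.and.foo.static.linker_flags` containing the
# concatenation of both inputs.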
def _darwin_create_extra_linker_flags_file(hs, cc, objects_dir, executable, dynamic, solibs):
"""Write additional linker flags required on MacOS to a parameter file.
Args:
hs: Haskell context.
cc: CcInteropInfo, information about C dependencies.
objects_dir: Directory storing object files.
Used to determine output file location.
executable: The executable being built.
dynamic: Bool: Whether to link dynamically or statically.
solibs: List of dynamic library dependencies.
Returns:
File: Parameter file with additional linker flags. To be passed to GHC.
"""
# On Darwin GHC will pass the dead_strip_dylibs flag to the linker. This
# flag will remove any shared library loads from the binary's header that
# are not directly resolving undefined symbols in the binary. I.e. any
# indirect shared library dependencies will be removed. This conflicts with
# Bazel's builtin cc rules, which assume that the final binary will load
    # all transitive shared library dependencies. In particular, shared
    # libraries produced by Bazel's cc rules never load shared libraries
    # themselves. This causes missing symbols at runtime on macOS, see #170.
#
# The following work-around applies the `-u` flag to the linker for any
# symbol that is undefined in any transitive shared library dependency.
# This forces the linker to resolve these undefined symbols in all
# transitive shared library dependencies and keep the corresponding load
# commands in the binary's header.
#
# Unfortunately, this prohibits elimination of any truly redundant shared
# library dependencies. Furthermore, the transitive closure of shared
# library dependencies can be large, so this makes it more likely to exceed
    # the Mach-O header size limit on macOS.
#
# This is a horrendous hack, but it seems to be forced on us by how Bazel
# builds dynamic cc libraries.
suffix = ".dynamic.linker_flags" if dynamic else ".static.linker_flags"
linker_flags_file = hs.actions.declare_file(
executable.basename + suffix,
sibling = objects_dir,
)
hs.actions.run_shell(
inputs = solibs,
outputs = [linker_flags_file],
command = """
touch {out}
for lib in {solibs}; do
{nm} -u "$lib" | sed 's/^/-optl-Wl,-u,/' >> {out}
done
""".format(
nm = cc.tools.nm,
solibs = " ".join(["\"" + l.path + "\"" for l in solibs]),
out = linker_flags_file.path,
),
)
return linker_flags_file
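# For illustration (hypothetical symbol): if some `libfoo.dylib` in `solibs`
# has an undefined symbol `_foo_init`, the parameter file will contain the
# line `-optl-Wl,-u,_foo_init`, forcing the linker to keep the corresponding
# load command.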
def _create_objects_dir_manifest(hs, objects_dir, dynamic, with_profiling):
suffix = ".dynamic.manifest" if dynamic else ".static.manifest"
objects_dir_manifest = hs.actions.declare_file(
objects_dir.basename + suffix,
sibling = objects_dir,
)
if with_profiling:
ext = "p_o"
elif dynamic:
ext = "dyn_o"
else:
ext = "o"
hs.actions.run_shell(
inputs = [objects_dir],
outputs = [objects_dir_manifest],
command = """
find {dir} -name '*.{ext}' > {out}
""".format(
dir = objects_dir.path,
ext = ext,
out = objects_dir_manifest.path,
),
use_default_shell_env = True,
)
return objects_dir_manifest
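# The resulting manifest lists one object file path per line, e.g. all
# `*.dyn_o` files below `objects_dir` for a dynamic build; it is later fed
# to GHC or `ar` as a response file.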
def _link_dependencies(hs, dep_info, dynamic, binary, args):
"""Configure linker flags and inputs.
    Configure linker flags for C library dependencies and runtime dynamic
    library dependencies, and collect the C libraries to pass as inputs to
    the linking action.
Args:
hs: Haskell context.
dep_info: HaskellInfo provider.
dynamic: Bool: Whether to link dynamically, or statically.
binary: Final linked binary.
args: Arguments to the linking action.
Returns:
depset: C library dependencies to provide as input to the linking action.
"""
# Pick linking context based on linking mode.
if dynamic:
link_ctx = dep_info.cc_dependencies.dynamic_linking
trans_link_ctx = dep_info.transitive_cc_dependencies.dynamic_linking
else:
link_ctx = dep_info.cc_dependencies.static_linking
trans_link_ctx = dep_info.transitive_cc_dependencies.static_linking
# Direct C library dependencies to link.
# I.e. not indirect through another Haskell dependency.
# Such indirect dependencies are linked by GHC based on the extra-libraries
# fields in the dependency's package configuration file.
libs_to_link = link_ctx.libraries_to_link.to_list()
_add_external_libraries(args, libs_to_link)
# Transitive library dependencies to have in scope for linking.
trans_libs_to_link = trans_link_ctx.libraries_to_link.to_list()
# Libraries to pass as inputs to linking action.
cc_link_libs = depset(transitive = [
depset(trans_libs_to_link),
])
# Transitive dynamic library dependencies to have in RUNPATH.
cc_solibs = trans_link_ctx.dynamic_libraries_for_runtime.to_list()
# Collect Haskell dynamic library dependencies in common RUNPATH.
# This is to keep the number of RUNPATH entries low, for faster loading
    # and to avoid exceeding the Mach-O header size limit on macOS.
hs_solibs = []
if dynamic:
hs_solibs_prefix = "_hssolib_%s" % hs.name
for dep in set.to_list(dep_info.dynamic_libraries):
dep_link = hs.actions.declare_file(
paths.join(hs_solibs_prefix, dep.basename),
sibling = binary,
)
ln(hs, dep, dep_link)
hs_solibs.append(dep_link)
# Configure RUNPATH.
rpaths = _infer_rpaths(
hs.toolchain.is_darwin,
binary,
trans_link_ctx.dynamic_libraries_for_runtime.to_list() +
hs_solibs,
)
for rpath in set.to_list(rpaths):
args.add("-optl-Wl,-rpath," + rpath)
return (cc_link_libs, cc_solibs, hs_solibs)
def link_binary(
hs,
cc,
dep_info,
extra_srcs,
compiler_flags,
objects_dir,
dynamic,
with_profiling,
version):
"""Link Haskell binary from static object files.
Returns:
File: produced executable
"""
exe_name = hs.name + (".exe" if hs.toolchain.is_windows else "")
executable = hs.actions.declare_file(exe_name)
args = hs.actions.args()
args.add_all(["-optl" + f for f in cc.linker_flags])
if with_profiling:
args.add("-prof")
args.add_all(hs.toolchain.compiler_flags)
args.add_all(compiler_flags)
# By default, GHC will produce mostly-static binaries, i.e. in which all
# Haskell code is statically linked and foreign libraries and system
# dependencies are dynamically linked. If linkstatic is false, i.e. the user
# has requested fully dynamic linking, we must therefore add flags to make
# sure that GHC dynamically links Haskell code too. The one exception to
# this is when we are compiling for profiling, which currently does not play
# nicely with dynamic linking.
if dynamic:
if with_profiling:
print("WARNING: dynamic linking and profiling don't mix. Omitting -dynamic.\nSee https://ghc.haskell.org/trac/ghc/ticket/15394")
else:
args.add_all(["-pie", "-dynamic"])
# When compiling with `-threaded`, GHC needs to link against
# the pthread library when linking against static archives (.a).
    # We assume it's not a problem to pass it in other cases,
# so we just default to passing it.
args.add("-optl-pthread")
args.add_all(["-o", executable.path])
    # De-duplicate optl calls while preserving ordering: we want the last
    # invocation of an object to remain last. That is, `-optl foo -optl
    # bar -optl foo` becomes `-optl bar -optl foo`. Do this by counting the
    # number of occurrences. That way we only build a dict and add to args
    # directly rather than doing multiple reversals with temporary
    # lists.
args.add_all(pkg_info_to_compile_flags(expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = None,
custom_package_databases = None,
version = version,
)))
(cc_link_libs, cc_solibs, hs_solibs) = _link_dependencies(
hs = hs,
dep_info = dep_info,
dynamic = dynamic,
binary = executable,
args = args,
)
# XXX: Suppress a warning that Clang prints due to GHC automatically passing
# "-pie" or "-no-pie" to the C compiler.
# This is linked to https://ghc.haskell.org/trac/ghc/ticket/15319
args.add_all([
"-optc-Wno-unused-command-line-argument",
"-optl-Wno-unused-command-line-argument",
])
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = dynamic,
with_profiling = with_profiling,
)
extra_linker_flags_file = None
if hs.toolchain.is_darwin:
args.add("-optl-Wl,-headerpad_max_install_names")
# Nixpkgs commit 3513034208a introduces -liconv in NIX_LDFLAGS on
# Darwin. We don't currently handle NIX_LDFLAGS in any special
        # way, so as a hack we simply always do what NIX_LDFLAGS tells us
        # we should do when using a toolchain from Nixpkgs.
# TODO remove this gross hack.
args.add("-liconv")
extra_linker_flags_file = _darwin_create_extra_linker_flags_file(
hs,
cc,
objects_dir,
executable,
dynamic,
cc_solibs,
)
if extra_linker_flags_file != None:
params_file = _merge_parameter_files(hs, objects_dir_manifest, extra_linker_flags_file)
else:
params_file = objects_dir_manifest
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = depset(transitive = [
depset(extra_srcs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.dynamic_libraries),
depset(dep_info.static_libraries),
depset(dep_info.static_libraries_prof),
depset([objects_dir]),
cc_link_libs,
]),
outputs = [executable],
mnemonic = "HaskellLinkBinary",
arguments = args,
params_file = params_file,
)
return (executable, cc_solibs + hs_solibs)
def _add_external_libraries(args, ext_libs):
"""Add options to `args` that allow us to link to `ext_libs`.
Args:
args: Args object.
ext_libs: C library dependencies.
"""
# Deduplicate the list of ext_libs based on their
# library name (file name stripped of lib prefix and endings).
# This keeps the command lines short, e.g. when a C library
# like `liblz4.so` appears in multiple dependencies.
    # XXX: this is only done here.
    # Shouldn't the deduplication be applied to *all* external libraries?
deduped = list.dedup_on(get_lib_name, ext_libs)
for lib in deduped:
args.add_all([
"-L{0}".format(
paths.dirname(lib.path),
),
"-l{0}".format(
# technically this is the second call to get_lib_name,
# but the added clarity makes up for it.
get_lib_name(lib),
),
])
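# For example, if `liblz4.so` is reachable through two different
# dependencies, only a single `-L<dir> -llz4` pair is emitted.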
def _infer_rpaths(is_darwin, target, solibs):
"""Return set of RPATH values to be added to target so it can find all
    solibs.
The resulting paths look like:
$ORIGIN/../../path/to/solib/dir
This means: "go upwards to your runfiles directory, then descend into
the parent folder of the solib".
Args:
is_darwin: Whether we're compiling on and for Darwin.
target: File, executable or library we're linking.
solibs: A list of Files, shared objects that the target needs.
Returns:
Set of strings: rpaths to add to target.
"""
r = set.empty()
if is_darwin:
prefix = "@loader_path"
else:
prefix = "$ORIGIN"
for solib in solibs:
rpath = create_rpath_entry(
binary = target,
dependency = solib,
keep_filename = False,
prefix = prefix,
)
set.mutable_insert(r, rpath)
return r
def _so_extension(hs):
"""Returns the extension for shared libraries.
Args:
hs: Haskell rule context.
Returns:
string of extension.
"""
return "dylib" if hs.toolchain.is_darwin else "so"
def link_library_static(hs, cc, dep_info, objects_dir, my_pkg_id, with_profiling):
"""Link a static library for the package using given object files.
Returns:
File: Produced static library.
"""
static_library = hs.actions.declare_file(
"lib{0}.a".format(pkg_id.library_name(hs, my_pkg_id, prof_suffix = with_profiling)),
)
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = False,
with_profiling = with_profiling,
)
args = hs.actions.args()
inputs = [objects_dir, objects_dir_manifest] + cc.files
if hs.toolchain.is_darwin:
# On Darwin, ar doesn't support params files.
args.add_all([
static_library,
objects_dir_manifest.path,
])
# TODO Get ar location from the CC toolchain. This is
# complicated by the fact that the CC toolchain does not
# always use ar, and libtool has an entirely different CLI.
# See https://github.com/bazelbuild/bazel/issues/5127
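        # `$(< $2)` splices the manifest's contents onto the command line,
        # standing in for the `@file` response-file syntax that Darwin's
        # ar lacks.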
hs.actions.run_shell(
inputs = inputs,
outputs = [static_library],
mnemonic = "HaskellLinkStaticLibrary",
command = "{ar} qc $1 $(< $2)".format(ar = cc.tools.ar),
arguments = [args],
# Use the default macosx toolchain
env = {"SDKROOT": "macosx"},
)
else:
args.add_all([
"qc",
static_library,
"@" + objects_dir_manifest.path,
])
hs.actions.run(
inputs = inputs,
outputs = [static_library],
mnemonic = "HaskellLinkStaticLibrary",
executable = cc.tools.ar,
arguments = [args],
)
return static_library
def link_library_dynamic(hs, cc, dep_info, extra_srcs, objects_dir, my_pkg_id):
"""Link a dynamic library for the package using given object files.
Returns:
File: Produced dynamic library.
"""
dynamic_library = hs.actions.declare_file(
"lib{0}-ghc{1}.{2}".format(
pkg_id.library_name(hs, my_pkg_id),
hs.toolchain.version,
_so_extension(hs),
),
)
args = hs.actions.args()
args.add_all(["-optl" + f for f in cc.linker_flags])
args.add_all(["-shared", "-dynamic"])
# Work around macOS linker limits. This fix has landed in GHC HEAD, but is
# not yet in a release; plus, we still want to support older versions of
# GHC. For details, see: https://phabricator.haskell.org/D4714
if hs.toolchain.is_darwin:
args.add("-optl-Wl,-dead_strip_dylibs")
args.add_all(pkg_info_to_compile_flags(expose_packages(
dep_info,
lib_info = None,
use_direct = True,
use_my_pkg_id = None,
custom_package_databases = None,
version = my_pkg_id.version if my_pkg_id else None,
)))
(cc_link_libs, _cc_solibs, _hs_solibs) = _link_dependencies(
hs = hs,
dep_info = dep_info,
dynamic = True,
binary = dynamic_library,
args = args,
)
args.add_all(["-o", dynamic_library.path])
# Profiling not supported for dynamic libraries.
objects_dir_manifest = _create_objects_dir_manifest(
hs,
objects_dir,
dynamic = True,
with_profiling = False,
)
hs.toolchain.actions.run_ghc(
hs,
cc,
inputs = depset([objects_dir], transitive = [
depset(extra_srcs),
set.to_depset(dep_info.package_databases),
set.to_depset(dep_info.dynamic_libraries),
cc_link_libs,
]),
outputs = [dynamic_library],
mnemonic = "HaskellLinkDynamicLibrary",
arguments = args,
params_file = objects_dir_manifest,
)
return dynamic_library


@@ -0,0 +1,210 @@
"""Action for creating packages and registering them with ghc-pkg"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/path_utils.bzl", "target_unique_name")
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/path_utils.bzl", "get_lib_name")
def _get_extra_libraries(dep_info):
"""Get directories and library names for extra library dependencies.
Args:
dep_info: HaskellInfo provider of the package.
Returns:
(dirs, libs):
dirs: list: Library search directories for extra library dependencies.
libs: list: Extra library dependencies.
"""
cc_libs = dep_info.cc_dependencies.dynamic_linking.libraries_to_link.to_list()
# The order in which library dependencies are listed is relevant when
# linking static archives. To maintain the order defined by the input
# depset we collect the library dependencies in a list, and use a separate
# set to deduplicate entries.
seen_libs = set.empty()
extra_libs = []
extra_lib_dirs = set.empty()
for lib in cc_libs:
lib_name = get_lib_name(lib)
if not set.is_member(seen_libs, lib_name):
set.mutable_insert(seen_libs, lib_name)
extra_libs.append(lib_name)
set.mutable_insert(extra_lib_dirs, lib.dirname)
return (set.to_list(extra_lib_dirs), extra_libs)
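# For illustration (hypothetical inputs): libraries `a/libfoo.so` and
# `b/libbar.a` yield the search dirs `a` and `b` and, in input order, the
# library names ["foo", "bar"].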
def package(
hs,
dep_info,
interfaces_dir,
interfaces_dir_prof,
static_library,
dynamic_library,
exposed_modules_file,
other_modules,
my_pkg_id,
static_library_prof):
"""Create GHC package using ghc-pkg.
    Args:
      hs: Haskell context.
      dep_info: HaskellInfo provider of the package.
      interfaces_dir: Directory containing interface files.
      interfaces_dir_prof: Directory containing profiling interface files, or None.
      static_library: Static library of the package.
      dynamic_library: Dynamic library of the package.
      exposed_modules_file: File listing the package's exposed modules.
      other_modules: List of names of hidden modules.
      my_pkg_id: Package id of the package to register.
      static_library_prof: Static library compiled with profiling, or None.
Returns:
(File, File): GHC package conf file, GHC package cache file
"""
pkg_db_dir = pkg_id.to_string(my_pkg_id)
conf_file = hs.actions.declare_file(
paths.join(pkg_db_dir, "{0}.conf".format(pkg_db_dir)),
)
cache_file = hs.actions.declare_file("package.cache", sibling = conf_file)
import_dir = paths.join(
"${pkgroot}",
paths.join(pkg_db_dir, "_iface"),
)
interfaces_dirs = [interfaces_dir]
if interfaces_dir_prof != None:
import_dir_prof = paths.join(
"${pkgroot}",
paths.join(pkg_db_dir, "_iface_prof"),
)
interfaces_dirs.append(interfaces_dir_prof)
else:
import_dir_prof = ""
(extra_lib_dirs, extra_libs) = _get_extra_libraries(dep_info)
metadata_entries = {
"name": my_pkg_id.name,
"version": my_pkg_id.version,
"id": pkg_id.to_string(my_pkg_id),
"key": pkg_id.to_string(my_pkg_id),
"exposed": "True",
"hidden-modules": " ".join(other_modules),
"import-dirs": " ".join([import_dir, import_dir_prof]),
"library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
"dynamic-library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
"hs-libraries": pkg_id.library_name(hs, my_pkg_id),
"extra-libraries": " ".join(extra_libs),
"depends": ", ".join(
# Prebuilt dependencies are added further down, since their
# package-ids are not available as strings but in build outputs.
set.to_list(dep_info.package_ids),
),
}
    # Create a file from which ghc-pkg will create the actual package.
    # The list of exposed modules is generated below.
metadata_file = hs.actions.declare_file(target_unique_name(hs, "metadata"))
hs.actions.write(
output = metadata_file,
content = "\n".join([
"{0}: {1}".format(k, v)
for k, v in metadata_entries.items()
if v
]) + "\n",
)
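    # For a hypothetical package id `mylib-0.1.0` the metadata file reads
    # roughly:
    #
    #     name: mylib
    #     version: 0.1.0
    #     id: mylib-0.1.0
    #     key: mylib-0.1.0
    #     exposed: True
    #     ...
    #
    # Empty fields are omitted; `exposed-modules` and the full `depends`
    # list are appended by the shell action below.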
# Collect the package id files of all prebuilt dependencies.
prebuilt_deps_id_files = [
dep.id_file
for dep in set.to_list(dep_info.prebuilt_dependencies)
]
# Combine exposed modules and other metadata to form the package
# configuration file.
prebuilt_deps_args = hs.actions.args()
prebuilt_deps_args.add_all([f.path for f in prebuilt_deps_id_files])
prebuilt_deps_args.use_param_file("%s", use_always = True)
prebuilt_deps_args.set_param_file_format("multiline")
hs.actions.run_shell(
inputs = [metadata_file, exposed_modules_file] + prebuilt_deps_id_files,
outputs = [conf_file],
command = """
cat $1 > $4
echo "exposed-modules: `cat $2`" >> $4
        # this is equivalent to 'readarray'. We don't use 'readarray' in order
        # to support older bash versions.
while IFS= read -r line; do deps_id_files+=("$line"); done < $3
if [ ${#deps_id_files[@]} -eq 0 ]; then
deps=""
else
deps=$(cat "${deps_id_files[@]}" | tr '\n' " ")
fi
echo "depends: $deps" >> $4
""",
arguments = [
metadata_file.path,
exposed_modules_file.path,
prebuilt_deps_args,
conf_file.path,
],
use_default_shell_env = True,
)
# Make the call to ghc-pkg and use the package configuration file
package_path = ":".join([c.dirname for c in set.to_list(dep_info.package_databases)]) + ":"
hs.actions.run(
inputs = depset(transitive = [
set.to_depset(dep_info.package_databases),
depset(interfaces_dirs),
depset([
input
for input in [
static_library,
conf_file,
dynamic_library,
static_library_prof,
]
if input
]),
]),
outputs = [cache_file],
env = {
"GHC_PACKAGE_PATH": package_path,
},
mnemonic = "HaskellRegisterPackage",
progress_message = "HaskellRegisterPackage {}".format(hs.label),
executable = hs.tools.ghc_pkg,
        # Registration of a new package consists of:
#
# 1. copying the registration file into the package db,
# 2. performing some validation on the registration file content,
# 3. recaching, i.e. regenerating the package db cache file.
#
# Normally, this is all done by `ghc-pkg register`. But in our
# case, `ghc-pkg register` is painful, because the validation
# it performs is slow, somewhat redundant but especially, too
# strict (see e.g.
# https://ghc.haskell.org/trac/ghc/ticket/15478). So we do (1)
# and (3) manually, by copying then calling `ghc-pkg recache`
# directly.
#
        # The downside is that we lose the few useful validations that
        # `ghc-pkg register` was doing, e.g. when reexporting modules,
        # validation checks that the source module does exist.
#
# TODO Go back to using `ghc-pkg register`. Blocked by
# https://ghc.haskell.org/trac/ghc/ticket/15478
arguments = [
"recache",
"--package-db={0}".format(conf_file.dirname),
"-v0",
"--no-expand-pkgroot",
],
# XXX: Seems required for this to work on Windows
use_default_shell_env = True,
)
return conf_file, cache_file


@@ -0,0 +1,175 @@
"""GHCi REPL support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load(
":private/set.bzl",
"set",
)
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_repl(
hs,
ghci_script,
ghci_repl_wrapper,
user_compile_flags,
repl_ghci_args,
hs_info,
output,
package_databases,
version,
lib_info = None):
"""Build REPL script.
Args:
hs: Haskell context.
hs_info: HaskellInfo.
package_databases: package caches excluding the cache file of the package
we're creating a REPL for.
lib_info: If we're building REPL for a library target, pass
HaskellLibraryInfo here, otherwise it should be None.
Returns:
None.
"""
    # The base and directory packages are necessary for the GHCi script we
    # use (it loads source files and brings the corresponding modules into
    # scope).
args = ["-package", "base", "-package", "directory"]
pkg_ghc_info = expose_packages(
hs_info,
lib_info,
use_direct = False,
use_my_pkg_id = None,
custom_package_databases = package_databases,
version = version,
)
args += pkg_info_to_compile_flags(pkg_ghc_info)
lib_imports = []
if lib_info != None:
for idir in set.to_list(hs_info.import_dirs):
args += ["-i{0}".format(idir)]
lib_imports.append(idir)
link_ctx = hs_info.cc_dependencies.dynamic_linking
libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()
# External C libraries that we need to make available to the REPL.
libraries = link_libraries(libs_to_link, args)
# Transitive library dependencies to have in runfiles.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
hs_info.transitive_cc_dependencies,
path_prefix = "$RULES_HASKELL_EXEC_ROOT",
)
library_path = [paths.dirname(lib.path) for lib in library_deps]
ld_library_path = [paths.dirname(lib.path) for lib in ld_library_deps]
repl_file = hs.actions.declare_file(target_unique_name(hs, "repl"))
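    # Sources below are prefixed with `*` so that GHCi's `:add` loads them
    # as interpreted modules (`*Module` syntax), bringing their full
    # top-level scope into the REPL.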
add_sources = ["*" + f.path for f in set.to_list(hs_info.source_files)]
ghci_repl_script = hs.actions.declare_file(
target_unique_name(hs, "ghci-repl-script"),
)
hs.actions.expand_template(
template = ghci_script,
output = ghci_repl_script,
substitutions = {
"{ADD_SOURCES}": " ".join(add_sources),
"{COMMANDS}": "",
},
)
# Extra arguments.
# `compiler flags` is the default set of arguments for the repl,
# augmented by `repl_ghci_args`.
    # The ordering is important: first compiler flags (from toolchain
    # and local rule), then `repl_ghci_args`. This way the more
    # specific arguments are listed last and take priority in
    # GHC.
    # Note that most GHCi flags have a negative counterpart, so a
    # negative flag in `repl_ghci_args` can disable a positive flag set
    # in `user_compile_flags`; e.g. `-XNoOverloadedStrings` will disable
    # `-XOverloadedStrings`.
args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args + repl_ghci_args
hs.actions.expand_template(
template = ghci_repl_wrapper,
output = repl_file,
substitutions = {
"{ENV}": render_env(ghc_env),
"{TOOL}": hs.tools.ghci.path,
"{ARGS}": " ".join(
[
"-ghci-script",
paths.join("$RULES_HASKELL_EXEC_ROOT", ghci_repl_script.path),
] + [
shell.quote(a)
for a in args
],
),
},
is_executable = True,
)
ghc_info = struct(
has_version = pkg_ghc_info.has_version,
library_path = library_path,
ld_library_path = ld_library_path,
packages = pkg_ghc_info.packages,
package_ids = pkg_ghc_info.package_ids,
package_dbs = pkg_ghc_info.package_dbs,
lib_imports = lib_imports,
libraries = libraries,
execs = struct(
ghc = hs.tools.ghc.path,
ghci = hs.tools.ghci.path,
runghc = hs.tools.runghc.path,
),
flags = struct(
compiler = user_compile_flags,
toolchain_compiler = hs.toolchain.compiler_flags,
repl = repl_ghci_args,
toolchain_repl = hs.toolchain.repl_ghci_args,
),
)
ghc_info_file = hs.actions.declare_file(
target_unique_name(hs, "ghc-info"),
)
hs.actions.write(
output = ghc_info_file,
content = ghc_info.to_json(),
)
    # XXX We create a symlink here because we need to force
    # hs.tools.ghci and ghci_script to be built, and the best way to do
    # that is to use hs.actions.run. That action, in turn, must produce
    # a result, so using ln seems to be the only sane choice.
extra_inputs = depset(transitive = [
depset([
hs.tools.ghci,
ghci_repl_script,
repl_file,
ghc_info_file,
]),
set.to_depset(package_databases),
depset(library_deps),
depset(ld_library_deps),
set.to_depset(hs_info.source_files),
])
ln(hs, repl_file, output, extra_inputs)


@@ -0,0 +1,115 @@
"""runghc support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(
":private/set.bzl",
"set",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_runghc(
hs,
runghc_wrapper,
user_compile_flags,
extra_args,
hs_info,
output,
package_databases,
version,
lib_info = None):
"""Build runghc script.
Args:
hs: Haskell context.
hs_info: HaskellInfo.
package_databases: package caches excluding the cache file of the package
we're creating a runghc for.
lib_info: If we're building runghc for a library target, pass
HaskellLibraryInfo here, otherwise it should be None.
Returns:
None.
"""
args = pkg_info_to_compile_flags(expose_packages(
hs_info,
lib_info,
use_direct = False,
use_my_pkg_id = None,
custom_package_databases = package_databases,
version = version,
))
if lib_info != None:
for idir in set.to_list(hs_info.import_dirs):
args += ["-i{0}".format(idir)]
link_ctx = hs_info.cc_dependencies.dynamic_linking
libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()
# External C libraries that we need to make available to runghc.
link_libraries(libs_to_link, args)
# Transitive library dependencies to have in runfiles.
(library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
hs,
hs_info.transitive_cc_dependencies,
path_prefix = "$RULES_HASKELL_EXEC_ROOT",
)
runghc_file = hs.actions.declare_file(target_unique_name(hs, "runghc"))
# Extra arguments.
# `compiler flags` is the default set of arguments for runghc,
# augmented by `extra_args`.
    # The ordering is important: first compiler flags (from toolchain
    # and local rule), then `extra_args`. This way the more
    # specific arguments are listed last and take priority in
    # GHC.
    # Note that most GHCi flags have a negative counterpart, so a
    # negative flag in `extra_args` can disable a positive flag set
    # in `user_compile_flags`; e.g. `-XNoOverloadedStrings` will disable
    # `-XOverloadedStrings`.
args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args
    # GHC args need to be wrapped in "--ghc-arg=" when passed to runghc
runcompile_flags = ["--ghc-arg=%s" % a for a in args]
runcompile_flags += extra_args
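    # For example (hypothetical flag): `-XOverloadedStrings` reaches runghc
    # as `--ghc-arg=-XOverloadedStrings`, while `extra_args` are passed
    # through unmodified.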
hs.actions.expand_template(
template = runghc_wrapper,
output = runghc_file,
substitutions = {
"{ENV}": render_env(ghc_env),
"{TOOL}": hs.tools.runghc.path,
"{ARGS}": " ".join([shell.quote(a) for a in runcompile_flags]),
},
is_executable = True,
)
    # XXX We create a symlink here because we need to force
    # hs.tools.runghc to be built, and the best way to do that is
    # to use hs.actions.run. That action, in turn, must produce
    # a result, so using ln seems to be the only sane choice.
extra_inputs = depset(transitive = [
depset([
hs.tools.runghc,
runghc_file,
]),
set.to_depset(package_databases),
depset(library_deps),
depset(ld_library_deps),
set.to_depset(hs_info.source_files),
])
ln(hs, runghc_file, output, extra_inputs)