feat(third_party/bazel): Check in rules_haskell from Tweag

This commit is contained in:
Vincent Ambo 2019-07-04 11:18:12 +01:00
parent 2eb1dc26e4
commit f723b8b878
479 changed files with 51484 additions and 0 deletions

View file

@ -0,0 +1,46 @@
# BUILD file for the haskell package: exports the scripts and templates the
# build rules consume, and declares the Haskell toolchain types.
exports_files(
    glob(["*.bzl"]) + [
        "assets/ghci_script",
        "private/ghci_repl_wrapper.sh",
        "private/haddock_wrapper.sh.tpl",
        "private/coverage_wrapper.sh.tpl",
        "private/osx_cc_wrapper.sh.tpl",
    ],
)

# to make functions visible to unit tests
exports_files(
    ["private/actions/link.bzl"],
    visibility = ["//tests/unit-tests:__pkg__"],
)

py_binary(
    name = "ls_modules",
    srcs = ["private/ls_modules.py"],
    visibility = ["//visibility:public"],
)

py_binary(
    name = "version_macros",
    srcs = ["private/version_macros.py"],
    visibility = ["//visibility:public"],
)

# generate the _GHC_BINDISTS dict
py_binary(
    name = "gen-ghc-bindist",
    srcs = [":gen_ghc_bindist.py"],
    main = ":gen_ghc_bindist.py",
    visibility = ["//visibility:public"],
)

# toolchains must have a valid toolchain_type from bazel 0.21
toolchain_type(
    name = "toolchain",
    visibility = ["//visibility:public"],
)

toolchain_type(
    name = "doctest-toolchain",
    visibility = ["//visibility:public"],
)

View file

@ -0,0 +1,49 @@
# CROSSTOOL definition for the MinGW toolchain bundled with the Windows GHC
# bindist (cpu x64_windows, compiler ghc-mingw-gcc).
major_version: "local"
minor_version: ""
toolchain {
  toolchain_identifier: "ghc_windows_mingw64"
  abi_version: "local"
  abi_libc_version: "local"
  builtin_sysroot: ""
  compiler: "ghc-mingw-gcc"
  host_system_name: "local"
  needsPic: false
  target_libc: "mingw"
  target_cpu: "x64_windows"
  target_system_name: "local"
  artifact_name_pattern {
    category_name: 'executable'
    prefix: ''
    extension: '.exe'
  }
  # All tools are resolved relative to the bindist's embedded MinGW directory.
  tool_path { name: "ar" path: "mingw/bin/ar" }
  tool_path { name: "compat-ld" path: "mingw/bin/ld" }
  tool_path { name: "cpp" path: "mingw/bin/cpp" }
  tool_path { name: "dwp" path: "mingw/bin/dwp" }
  tool_path { name: "gcc" path: "mingw/bin/gcc" }
  tool_path { name: "gcov" path: "mingw/bin/gcov" }
  tool_path { name: "ld" path: "mingw/bin/ld" }
  tool_path { name: "nm" path: "mingw/bin/nm" }
  tool_path { name: "objcopy" path: "mingw/bin/objcopy" }
  tool_path { name: "objdump" path: "mingw/bin/objdump" }
  tool_path { name: "strip" path: "mingw/bin/strip" }
  cxx_builtin_include_directory: "mingw"
  cxx_flag: "-std=gnu++0x"
  # Needed to prevent Bazel from complaining about undeclared inclusions of
  # MingW headers.
  #
  # See: https://github.com/bazelbuild/bazel/issues/4605
  unfiltered_cxx_flag: "-no-canonical-prefixes"
  unfiltered_cxx_flag: "-fno-canonical-system-headers"
  linker_flag: "-lstdc++"
  objcopy_embed_flag: "-I"
  objcopy_embed_flag: "binary"
  feature { name: "targets_windows" implies: "copy_dynamic_libraries_to_binary" enabled: true }
  feature { name: "copy_dynamic_libraries_to_binary" }
  linking_mode_flags { mode: DYNAMIC }
}

View file

@ -0,0 +1,11 @@
--- lib/package.conf.d/base-4.12.0.0.conf 2019-03-19 18:04:35.186653529 +0100
+++ lib/package.conf.d/base-4.12.0.0.conf 2019-03-19 18:04:48.958873769 +0100
@@ -79,7 +79,7 @@
data-dir: $topdir\x86_64-windows-ghc-8.6.2\base-4.12.0.0
hs-libraries: HSbase-4.12.0.0
extra-libraries:
- wsock32 user32 shell32 msvcrt mingw32 mingwex
+ wsock32 user32 shell32 msvcrt mingw32 mingwex shlwapi
include-dirs: $topdir\base-4.12.0.0\include
includes:
HsBase.h

View file

@ -0,0 +1,11 @@
--- lib/package.conf.d/base-4.12.0.0.conf 2019-03-20 12:24:30.857292020 +0100
+++ lib/package.conf.d/base-4.12.0.0.conf 2019-03-20 12:24:44.637400564 +0100
@@ -79,7 +79,7 @@
data-dir: $topdir\x86_64-windows-ghc-8.6.4\base-4.12.0.0
hs-libraries: HSbase-4.12.0.0
extra-libraries:
- wsock32 user32 shell32 msvcrt mingw32 mingwex
+ wsock32 user32 shell32 msvcrt mingw32 mingwex shlwapi
include-dirs: $topdir\base-4.12.0.0\include
includes:
HsBase.h

View file

@ -0,0 +1,39 @@
-- GHCi script used by the REPL support. {ADD_SOURCES} and {COMMANDS} are
-- placeholders substituted by the rule implementation.
:add {ADD_SOURCES}
:module + System.IO GHC.IO.Handle Control.Exception System.Directory
import qualified GHC.IO.Handle as Handle
import qualified System.IO as IO
import qualified System.Directory as Dir
-- Keep a duplicate of the real stdout so it can be restored later.
rules_haskell_stdout_dupe <- Handle.hDuplicate IO.stdout
-- Redirect stdout into a temp file so `:show modules` output can be captured.
:{
(rules_haskell_stdout_copy_file, rules_haskell_stdout_copy_h) <- do
  rules_haskell_tmp_dir <- Dir.getTemporaryDirectory Prelude.>>= Dir.canonicalizePath
  (rules_haskell_fn, rules_haskell_h) <- IO.openTempFile rules_haskell_tmp_dir "rules-haskell-ghci-repl"
  Handle.hDuplicateTo rules_haskell_h IO.stdout
  Prelude.return (rules_haskell_fn, rules_haskell_h)
:}
:show modules
:{
rules_haskell_loaded_modules <- do
  Handle.hClose rules_haskell_stdout_copy_h
  -- I had to do it like this because flushing and then searching in the
  -- stream at offset 0 did not work (no data is there, although the
  -- corresponding file certainly contained it after flushing). Couldn't
  -- figure this one out, so we first close the file and then read from it.
  rules_haskell_h <- IO.openFile rules_haskell_stdout_copy_file IO.ReadMode
  rules_haskell_xs <- Handle.hGetContents rules_haskell_h
  Dir.removeFile rules_haskell_stdout_copy_file
  Prelude.return Prelude.$ Prelude.takeWhile (Prelude./= ' ') Prelude.<$> Prelude.lines rules_haskell_xs
:}
-- Restore the real stdout. Qualified as Handle.hDuplicateTo for consistency
-- with the other Handle.* calls above.
Handle.hDuplicateTo rules_haskell_stdout_dupe IO.stdout
:{
let rules_haskell_add_loaded_modules _ =
      Prelude.return Prelude.$ ":module + " Prelude.++
      Data.List.intercalate " " (("*" Prelude.++) Prelude.<$> rules_haskell_loaded_modules)
:}
:module - System.IO GHC.IO.Handle Control.Exception System.Directory
:def rules_haskell_add_loaded_modules rules_haskell_add_loaded_modules
:rules_haskell_add_loaded_modules
:undef rules_haskell_add_loaded_modules
-- reload modules to drop the rules_haskell* definitions
:reload
{COMMANDS}

View file

@ -0,0 +1,183 @@
"""Support for c2hs"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"C2hsLibraryInfo",
"HaskellInfo",
)
load(":cc.bzl", "cc_interop_info")
load(":private/context.bzl", "haskell_context")
load(":private/dependencies.bzl", "gather_dep_info")
load(
":private/path_utils.bzl",
"declare_compiled",
"target_unique_name",
)
load(":private/set.bzl", "set")
load(":private/version_macros.bzl", "version_macro_includes")
def _c2hs_library_impl(ctx):
    """Run c2hs on a single .chs file, producing a .hs and a .chi file."""
    hs = haskell_context(ctx)
    cc = cc_interop_info(ctx)
    args = hs.actions.args()
    c2hs = ctx.toolchains["@io_tweag_rules_haskell//haskell/c2hs:toolchain"].c2hs

    # c2hs processes exactly one module per invocation.
    if len(ctx.files.srcs) != 1:
        fail("srcs field should contain exactly one file.")
    chs_file = ctx.files.srcs[0]

    # Output a Haskell source file.
    chs_dir_raw = target_unique_name(hs, "chs")
    hs_file = declare_compiled(hs, chs_file, ".hs", directory = chs_dir_raw)
    chi_file = declare_compiled(hs, chs_file, ".chi", directory = chs_dir_raw)
    args.add_all([chs_file.path, "-o", hs_file.path])

    # Flags prefixed with -C are forwarded by c2hs to the C preprocessor.
    args.add("-C-E")
    args.add_all(["--cpp", cc.tools.cpp])
    args.add("-C-includeghcplatform.h")
    args.add("-C-includeghcversion.h")
    args.add_all(["-C" + x for x in cc.cpp_flags])
    args.add_all(["-C" + x for x in cc.include_args])

    # .chi files produced by direct c2hs dependencies; needed as inputs and
    # made findable via -i flags below.
    dep_chi_files = [
        dep[C2hsLibraryInfo].chi_file
        for dep in ctx.attr.deps
        if C2hsLibraryInfo in dep
    ]
    chi_includes = [
        "-i" + dep[C2hsLibraryInfo].import_dir
        for dep in ctx.attr.deps
        if C2hsLibraryInfo in dep
    ]
    args.add_all(chi_includes)

    # When a version is given, generate CPP version macros for dependencies.
    version_macro_headers = set.empty()
    if ctx.attr.version:
        dep_info = gather_dep_info(ctx, ctx.attr.deps)
        (version_macro_headers, version_macro_flags) = version_macro_includes(dep_info)
        args.add_all(["-C" + x for x in version_macro_flags])

    hs.actions.run_shell(
        inputs = depset(transitive = [
            depset(cc.hdrs),
            depset([hs.tools.ghc, c2hs, chs_file]),
            depset(dep_chi_files),
            depset(cc.files),
            set.to_depset(version_macro_headers),
        ]),
        outputs = [hs_file, chi_file],
        command = """
# Include libdir in include path just like hsc2hs does.
libdir=$({ghc} --print-libdir)
{c2hs} -C-I$libdir/include "$@"
        """.format(
            ghc = hs.tools.ghc.path,
            c2hs = c2hs.path,
        ),
        mnemonic = "HaskellC2Hs",
        arguments = [args],
        env = hs.env,
    )

    # Directory from which the generated module hierarchy can be imported.
    idir = paths.join(
        hs.bin_dir.path,
        hs.label.workspace_root,
        hs.label.package,
        chs_dir_raw,
    )

    return [
        DefaultInfo(files = depset([hs_file])),
        C2hsLibraryInfo(
            chi_file = chi_file,
            import_dir = idir,
        ),
    ]
# Rule: compile a single `.chs` module with c2hs into a Haskell source file.
c2hs_library = rule(
    _c2hs_library_impl,
    attrs = {
        "deps": attr.label_list(),
        "srcs": attr.label_list(allow_files = [".chs"]),
        "src_strip_prefix": attr.string(
            doc = "Directory in which module hierarchy starts.",
        ),
        "version": attr.string(
            doc = "Executable version. If this is specified, CPP version macros will be generated for this build.",
        ),
        "_cc_toolchain": attr.label(
            default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
        ),
    },
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
        "@io_tweag_rules_haskell//haskell/c2hs:toolchain",
    ],
)
def _c2hs_toolchain_impl(ctx):
    """Expose the configured c2hs executable as a ToolchainInfo provider."""
    toolchain_info = platform_common.ToolchainInfo(
        name = ctx.label.name,
        c2hs = ctx.file.c2hs,
    )
    return [toolchain_info]
# Internal rule wrapping a c2hs binary as a toolchain implementation; use the
# `c2hs_toolchain` macro rather than this rule directly.
_c2hs_toolchain = rule(
    _c2hs_toolchain_impl,
    attrs = {
        "c2hs": attr.label(
            doc = "The c2hs executable.",
            mandatory = True,
            allow_single_file = True,
        ),
    },
)
def c2hs_toolchain(name, c2hs, **kwargs):
    """Declare a Haskell c2hs toolchain.

    You need at least one of these declared somewhere in your `BUILD`
    files for the `chs_library` rule to work. Once declared, you then
    need to *register* the toolchain using `register_toolchains` in
    your `WORKSPACE` file (see example below).

    Example:
      In a `BUILD` file:
      ```bzl
      c2hs_toolchain(
          name = "c2hs",
          c2hs = "@c2hs//:bin",
      )
      ```
      where `@c2hs` is an external repository defined in the
      `WORKSPACE`, e.g. using:
      ```bzl
      nixpkgs_package(
          name = "c2hs",
          attribute_path = "haskell.packages.ghc822.c2hs",
      )
      register_toolchains("//:c2hs")
      ```
    """
    impl_name = name + "-impl"

    # The concrete toolchain implementation target.
    _c2hs_toolchain(
        name = impl_name,
        c2hs = c2hs,
        visibility = ["//visibility:public"],
        **kwargs
    )

    # The toolchain target that Bazel resolves against the toolchain type.
    native.toolchain(
        name = name,
        toolchain_type = "@io_tweag_rules_haskell//haskell/c2hs:toolchain",
        toolchain = ":" + impl_name,
    )

View file

@ -0,0 +1,4 @@
# Toolchain type against which c2hs toolchain implementations are registered.
toolchain_type(
    name = "toolchain",
    visibility = ["//visibility:public"],
)

View file

@ -0,0 +1,353 @@
"""Interop with cc_* rules
These rules are deprecated.
"""
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load(
"@bazel_tools//tools/build_defs/cc:action_names.bzl",
"CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME",
"C_COMPILE_ACTION_NAME",
)
load(":private/path_utils.bzl", "ln")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/set.bzl", "set")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellInfo",
)
# Provider bundling everything the Haskell rules need from the CC toolchain
# and from CC dependencies (tools, headers, and flag lists).
CcInteropInfo = provider(
    doc = "Information needed for interop with cc rules.",
    fields = {
        "tools": "Tools from the CC toolchain",
        # See the following for why this is needed:
        # https://stackoverflow.com/questions/52769846/custom-c-rule-with-the-cc-common-api
        "files": "Files for all tools (input to any action that uses tools)",
        "hdrs": "CC headers",
        "cpp_flags": "Preprocessor flags",
        "compiler_flags": "Flags for compilation",
        "linker_flags": "Flags to forward to the linker",
        "include_args": "Extra include dirs",
    },
)
def cc_interop_info(ctx):
    """Gather information from any CC dependencies.

    *Internal function - do not use.*

    Args:
      ctx: Rule context.

    Returns:
      CcInteropInfo: Information needed for CC interop.
    """
    # Pure CC deps only: Haskell targets can also carry CcInfo, so skip those.
    ccs = [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep and HaskellInfo not in dep]

    # Collect headers, -I dirs and preprocessor flags from every CC dep.
    hdrs = []
    include_args = []
    cpp_flags = []
    for cc in ccs:
        cc_ctx = cc.compilation_context
        hdrs.append(cc_ctx.headers)
        include_args.extend(["-I" + include for include in cc_ctx.includes])
        cpp_flags.extend(
            [
                "-D" + define
                for define in cc_ctx.defines
            ] + [
                f
                for include in cc_ctx.quote_includes
                for f in ["-iquote", include]
            ] + [
                f
                for include in cc_ctx.system_includes
                for f in ["-isystem", include]
            ],
        )
    hdrs = depset(transitive = hdrs)

    # XXX Workaround https://github.com/bazelbuild/bazel/issues/6874.
    # Should be find_cpp_toolchain() instead.
    cc_toolchain = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
    feature_configuration = cc_common.configure_features(
        cc_toolchain = cc_toolchain,
        requested_features = ctx.features,
        unsupported_features = ctx.disabled_features,
    )
    compile_variables = cc_common.create_compile_variables(
        feature_configuration = feature_configuration,
        cc_toolchain = cc_toolchain,
    )
    compiler_flags = cc_common.get_memory_inefficient_command_line(
        feature_configuration = feature_configuration,
        action_name = C_COMPILE_ACTION_NAME,
        variables = compile_variables,
    )
    link_variables = cc_common.create_link_variables(
        feature_configuration = feature_configuration,
        cc_toolchain = cc_toolchain,
        is_linking_dynamic_library = False,
        is_static_linking_mode = True,
    )
    linker_flags = cc_common.get_memory_inefficient_command_line(
        feature_configuration = feature_configuration,
        action_name = CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME,
        variables = link_variables,
    )

    # Generate cc wrapper script on Darwin that adjusts load commands.
    hs_toolchain = ctx.toolchains["@io_tweag_rules_haskell//haskell:toolchain"]
    if hs_toolchain.is_darwin:
        cc_wrapper = ctx.actions.declare_file("osx_cc_wrapper")
        cc = cc_wrapper.path
        ctx.actions.expand_template(
            template = hs_toolchain.osx_cc_wrapper_tpl,
            output = cc_wrapper,
            substitutions = {
                "%{cc}": cc_toolchain.compiler_executable(),
            },
        )
        cc_files = ctx.files._cc_toolchain + [
            cc_wrapper,
        ]
    else:
        cc = cc_toolchain.compiler_executable()
        cc_files = ctx.files._cc_toolchain

    # XXX Workaround https://github.com/bazelbuild/bazel/issues/6876.
    linker_flags = [flag for flag in linker_flags if flag not in ["-shared"]]

    tools = {
        "ar": cc_toolchain.ar_executable(),
        "cc": cc,
        "ld": cc_toolchain.ld_executable(),
        "cpp": cc_toolchain.preprocessor_executable(),
        "nm": cc_toolchain.nm_executable(),
    }

    # If running on darwin but XCode is not installed (i.e., only the Command
    # Line Tools are available), then Bazel will make ar_executable point to
    # "/usr/bin/libtool". Since we call ar directly, override it.
    # TODO: remove this if Bazel fixes its behavior.
    # Upstream ticket: https://github.com/bazelbuild/bazel/issues/5127.
    if tools["ar"].find("libtool") >= 0:
        tools["ar"] = "/usr/bin/ar"

    return CcInteropInfo(
        tools = struct(**tools),
        files = cc_files,
        hdrs = hdrs.to_list(),
        cpp_flags = cpp_flags,
        include_args = include_args,
        compiler_flags = compiler_flags,
        # XXX this might not be the right set of flags for all situations,
        # but this will anyways all be replaced (once implemented) by
        # https://github.com/bazelbuild/bazel/issues/4571.
        linker_flags = linker_flags,
    )
def _cc_import_impl(ctx):
    """Build a CcInfo from prebuilt headers and a shared library."""
    strip_prefix = ctx.attr.strip_include_prefix

    # cc_library's strip_include_prefix attribute accepts both absolute and
    # relative paths. For simplicity we currently only implement absolute
    # paths.
    if strip_prefix.startswith("/"):
        prefix = strip_prefix[1:]
    else:
        prefix = paths.join(ctx.label.workspace_root, ctx.label.package, strip_prefix)

    # Collect the distinct roots of all headers; each root + prefix becomes an
    # include directory.
    roots = set.empty()
    for f in ctx.files.hdrs:
        # If it's a generated file, strip off the bin or genfiles prefix.
        path = f.path
        if path.startswith(ctx.bin_dir.path):
            path = paths.relativize(path, ctx.bin_dir.path)
        elif path.startswith(ctx.genfiles_dir.path):
            path = paths.relativize(path, ctx.genfiles_dir.path)
        if not path.startswith(prefix):
            fail("Header {} does not have expected prefix {}".format(
                path,
                prefix,
            ))
        roots = set.insert(roots, f.root.path if f.root.path else ".")

    include_directories = [paths.join(root, prefix) for root in set.to_list(roots)]

    cc_toolchain = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
    feature_configuration = cc_common.configure_features(cc_toolchain = cc_toolchain)

    compilation_context = cc_common.create_compilation_context(
        headers = depset(transitive = [l.files for l in ctx.attr.hdrs]),
        includes = depset(direct = include_directories),
    )
    linking_context = cc_common.create_linking_context(
        libraries_to_link = [
            cc_common.create_library_to_link(
                actions = ctx.actions,
                feature_configuration = feature_configuration,
                cc_toolchain = cc_toolchain,
                dynamic_library = f,
            )
            for f in ctx.attr.shared_library.files
        ],
    )
    return [
        CcInfo(
            compilation_context = compilation_context,
            linking_context = linking_context,
        ),
    ]
# Deprecated temporary replacement for cc_import; see the doc string below.
haskell_cc_import = rule(
    _cc_import_impl,
    attrs = {
        "shared_library": attr.label(
            # NOTE We do not list all extensions here because .so libraries may
            # have numeric suffixes like foo.so.1.2.3, and if they also have
            # SONAME with numeric suffix, matching file must be provided, so this
            # attributes must accept libraries with almost arbitrary extensions.
            # It would be easier if Skylark supported regexps.
            allow_files = True,
            doc = """A single precompiled shared library.
Bazel ensures it is available to the binary that depends on it
during runtime.
""",
        ),
        "hdrs": attr.label_list(
            allow_files = [".h"],
            doc = """
The list of header files published by this precompiled library to be
directly included by sources in dependent rules.
""",
        ),
        "strip_include_prefix": attr.string(
            doc = """
The prefix to strip from the paths of the headers of this rule.
When set, the headers in the `hdrs` attribute of this rule are
accessible at their path (relative to the repository) with this
prefix cut off.
If it's a relative path, it's taken as a package-relative one. If it's an
absolute one, it's understood as a repository-relative path.
""",
        ),
        "_cc_toolchain": attr.label(
            default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
        ),
    },
)

"""Imports a prebuilt shared library.
Use this to make `.so`, `.dll`, `.dylib` files residing in external
[external repositories][bazel-ext-repos] available to Haskell rules.
*This rule is temporary replacement for [cc_import][cc_import] and is
deprecated. Use [cc_library][cc_library] instead as shown in the example.*
Example:
  ```bzl
  # Deprecated, use cc_library instead.
  # haskell_cc_import(name = "zlib", shared_library = "@zlib//:lib")
  cc_library(name = "zlib", srcs = ["@zlib//:lib"])
  haskell_import(
      name = "base_pkg",
      package = "base",
  )
  haskell_binary(
      name = "crc32sum",
      srcs = ["Main.hs"],
      deps = [
          "bazel_pkg",
          ":zlib",
      ],
  )
  ```
[bazel-ext-repos]: https://docs.bazel.build/versions/master/external.html
[cc_import]: https://docs.bazel.build/versions/master/be/c-cpp.html#cc_import
[cc_library]: https://docs.bazel.build/versions/master/be/c-cpp.html#cc_library
"""
def _cc_haskell_import(ctx):
    """Expose a Haskell target's dynamic libraries as default outputs."""
    dep = ctx.attr.dep

    # Guard: the dependency must be a Haskell rule.
    if HaskellInfo not in dep:
        fail("{0} has to provide `HaskellInfo`".format(dep.label.name))

    dyn_libs = set.empty()
    set.mutable_union(dyn_libs, dep[HaskellInfo].dynamic_libraries)

    default_info = DefaultInfo(
        files = set.to_depset(dyn_libs),
        default_runfiles = ctx.runfiles(
            files = dep.default_runfiles.files.to_list(),
            collect_default = True,
        ),
        data_runfiles = ctx.runfiles(
            files = dep.data_runfiles.files.to_list(),
            collect_data = True,
        ),
    )
    return [default_info]
# Deprecated: exports a Haskell target's shared libraries so that cc_* rules
# can link against them; see the doc string below.
cc_haskell_import = rule(
    _cc_haskell_import,
    attrs = {
        "dep": attr.label(
            doc = """
Target providing a `HaskellInfo` such as `haskell_library` or
`haskell_binary`.
""",
        ),
    },
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)

"""Exports a Haskell library as a CC library.
Given a [haskell_library](#haskell_library) or
[haskell_binary](#haskell_binary) input, outputs the shared object files
produced as well as the object files it depends on directly and
transitively. This is very useful if you want to link in a Haskell shared
library from `cc_library`.
There is a caveat: this will not provide any shared libraries that
aren't explicitly given to it. This means that if you're using
`prebuilt_dependencies` and relying on GHC to provide those objects,
they will not be present here. You will have to provide those
separately to your `cc_library`. If you're getting
`prebuilt_dependencies` from your toolchain, you will likely want to
extract those and pass them in as well.
*This rule is deprecated.*
Example:
  ```bzl
  haskell_library(
      name = "my-lib",
      ...
  )
  cc_haskell_import(
      name = "my-lib-objects",
      dep = ":my-lib",
  )
  cc_library(
      name = "my-cc",
      srcs = ["main.c", ":my-lib-objects"],
  )
  ```
[bazel-cpp-sandwich]: https://github.com/bazelbuild/bazel/issues/2163
"""

View file

@ -0,0 +1,228 @@
"""Doctest support"""
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/context.bzl", "haskell_context", "render_env")
load(
":private/path_utils.bzl",
"get_lib_name",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load(":private/set.bzl", "set")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
)
def _doctest_toolchain_impl(ctx):
    """Wrap the configured doctest executable in a ToolchainInfo provider."""
    info = platform_common.ToolchainInfo(
        name = ctx.label.name,
        doctest = ctx.files.doctest,
    )
    return info
# Internal rule wrapping a doctest binary as a toolchain implementation; use
# the `haskell_doctest_toolchain` macro rather than this rule directly.
_doctest_toolchain = rule(
    _doctest_toolchain_impl,
    attrs = {
        "doctest": attr.label(
            doc = "Doctest executable",
            cfg = "host",
            executable = True,
            allow_single_file = True,
            mandatory = True,
        ),
    },
)
def haskell_doctest_toolchain(name, doctest, **kwargs):
    """Declare a toolchain for the `haskell_doctest` rule.

    You need at least one of these declared somewhere in your `BUILD`files
    for `haskell_doctest` to work. Once declared, you then need to *register*
    the toolchain using `register_toolchains` in your `WORKSPACE` file.

    Example:
      In a `BUILD` file:
      ```bzl
      haskell_doctest_toolchain(
          name = "doctest",
          doctest = "@doctest//:bin",
      )
      ```
      And in `WORKSPACE`:
      ```
      register_toolchains("//:doctest")
      ```
    """
    impl_name = name + "-impl"

    # The concrete toolchain implementation target.
    _doctest_toolchain(
        name = impl_name,
        doctest = doctest,
        visibility = ["//visibility:public"],
        **kwargs
    )

    # The toolchain target that Bazel resolves against the toolchain type.
    native.toolchain(
        name = name,
        toolchain_type = "@io_tweag_rules_haskell//haskell:doctest-toolchain",
        toolchain = ":" + impl_name,
    )
def _haskell_doctest_single(target, ctx):
    """Doctest a single Haskell `target`.

    Args:
      target: Provider(s) of the target to doctest.
      ctx: Rule context.

    Returns:
      File: the doctest log, or [] if `target` is not a Haskell target.
    """
    # Non-Haskell dependencies have nothing to doctest.
    if HaskellInfo not in target:
        return []

    hs = haskell_context(ctx, ctx.attr)
    hs_info = target[HaskellInfo]
    cc_info = target[CcInfo]
    # NOTE(review): lib_info is never read below.
    lib_info = target[HaskellLibraryInfo] if HaskellLibraryInfo in target else None

    args = ctx.actions.args()
    args.add("--no-magic")

    doctest_log = ctx.actions.declare_file(
        "doctest-log-" + ctx.label.name + "-" + target.label.name,
    )

    toolchain = ctx.toolchains["@io_tweag_rules_haskell//haskell:doctest-toolchain"]

    # GHC flags we have prepared before.
    args.add_all(hs_info.compile_flags)

    # Add any extra flags specified by the user.
    args.add_all(ctx.attr.doctest_flags)

    # Direct C library dependencies to link against.
    link_ctx = hs_info.cc_dependencies.dynamic_linking
    libs_to_link = link_ctx.libraries_to_link.to_list()

    # External libraries (deduplicated by library name).
    seen_libs = set.empty()
    for lib in libs_to_link:
        lib_name = get_lib_name(lib)
        if not set.is_member(seen_libs, lib_name):
            set.mutable_insert(seen_libs, lib_name)
            if hs.toolchain.is_darwin:
                args.add_all([
                    "-optl-l{0}".format(lib_name),
                    "-optl-L{0}".format(paths.dirname(lib.path)),
                ])
            else:
                args.add_all([
                    "-l{0}".format(lib_name),
                    "-L{0}".format(paths.dirname(lib.path)),
                ])

    # Transitive library dependencies for runtime.
    (library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
        hs,
        hs_info.transitive_cc_dependencies,
    )

    # Doctest either the user-selected modules or every source file.
    sources = set.to_list(hs_info.source_files)
    if ctx.attr.modules:
        inputs = ctx.attr.modules
    else:
        inputs = [source.path for source in sources]

    ctx.actions.run_shell(
        inputs = depset(transitive = [
            depset(sources),
            set.to_depset(hs_info.package_databases),
            set.to_depset(hs_info.interface_dirs),
            set.to_depset(hs_info.dynamic_libraries),
            cc_info.compilation_context.headers,
            depset(library_deps),
            depset(ld_library_deps),
            depset(
                toolchain.doctest +
                [hs.tools.ghc],
            ),
        ]),
        outputs = [doctest_log],
        mnemonic = "HaskellDoctest",
        progress_message = "HaskellDoctest {}".format(ctx.label),
        command = """
{env}
{doctest} "$@" {inputs} > {output} 2>&1 || (rc=$? && cat {output} && exit $rc)
        """.format(
            doctest = toolchain.doctest[0].path,
            output = doctest_log.path,
            inputs = " ".join(inputs),
            # XXX Workaround
            # https://github.com/bazelbuild/bazel/issues/5980.
            env = render_env(hs.env),
        ),
        arguments = [args],
        # NOTE It looks like we must specify the paths here as well as via -L
        # flags because there are at least two different "consumers" of the info
        # (ghc and linker?) and they seem to prefer to get it in different ways
        # in this case.
        env = dicts.add(
            ghc_env,
            hs.env,
        ),
        execution_requirements = {
            # Prevents a race condition among concurrent doctest tests on Linux.
            #
            # The reason is that the doctest process uses its own PID to determine the name
            # of its working directory. In presence of PID namespacing, this occasionally results
            # in multiple concurrent processes attempting to create the same directory.
            # See https://github.com/sol/doctest/issues/219 for details.
            #
            # For some reason, setting "exclusive": "1" does not fix the issue, so we disable
            # sandboxing altogether for doctest tests.
            "no-sandbox": "1",
        },
    )
    return doctest_log
def _haskell_doctest_impl(ctx):
    """Collect doctest logs for every Haskell target in `deps`."""
    logs = []
    for dep in ctx.attr.deps:
        log = _haskell_doctest_single(dep, ctx)

        # _haskell_doctest_single returns [] for deps without HaskellInfo.
        # Only append real log Files: putting the empty list itself into
        # `logs` would make depset() fail on a non-File element.
        if log:
            logs.append(log)
    return DefaultInfo(
        files = depset(logs),
    )
# Rule: run doctest over each Haskell target in `deps`; see doc string below.
haskell_doctest = rule(
    _haskell_doctest_impl,
    attrs = {
        "deps": attr.label_list(
            doc = "List of Haskell targets to lint.",
        ),
        "doctest_flags": attr.string_list(
            doc = "Extra flags to pass to doctest executable.",
        ),
        "modules": attr.string_list(
            doc = """List of names of modules that will be tested. If the list is
omitted, all exposed modules provided by `deps` will be tested.
""",
        ),
    },
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
        "@io_tweag_rules_haskell//haskell:doctest-toolchain",
    ],
)

"""Run doctest test on targets in `deps`.
Note that your toolchain must be equipped with `doctest` executable, i.e.
you should specify location of the executable using the `doctest` attribute
of `haskell_doctest_toolchain`.
"""

View file

@ -0,0 +1,152 @@
#!/usr/bin/env python
# This is a happy-path tool to download the bindist
# download paths and hashes, for maintainers.
# It uses the hashes provided by download.haskell.org.
from __future__ import print_function
import pprint
import sys
import urllib2
# All GHC versions we generate.
# `version` is the version number
# `distribution_version` is a corrected name
# (sometimes bindists have errors and are updated by new bindists)
# `ignore_prefixes` is the prefix of files to ignore
# `ignore_suffixes` is the suffix of files to ignore
VERSIONS = [
    {"version": "8.6.5"},
    {"version": "8.6.4"},
    {"version": "8.6.3"},
    {"version": "8.6.2"},
    {"version": "8.4.4"},
    {"version": "8.4.3"},
    {"version": "8.4.2"},
    {"version": "8.4.1"},
    {"version": "8.2.2"},
    {"version": "8.0.2",
     "ignore_suffixes": [".patch"]},
    # 7.10.3 bindists were re-released as "7.10.3b"; the original tarballs
    # are skipped via ignore_prefixes.
    {"version": "7.10.3",
     "distribution_version": "7.10.3b",
     "ignore_prefixes": ["ghc-7.10.3-", "ghc-7.10.3a-"],
     "ignore_suffixes": [".bz2", ".patch"]}
]

# All architectures we generate.
# bazel: bazel name
# upstream: download.haskell.org name
ARCHES = [
    {"bazel": "linux_amd64",
     "upstream": "x86_64-deb8-linux", },
    {"bazel": "darwin_amd64",
     "upstream": "x86_64-apple-darwin"},
    {"bazel": "windows_amd64",
     "upstream": "x86_64-unknown-mingw32"},
]
def link_for_tarball(arch, version):
    """Return the download URL of the bindist tarball for `version` on `arch`."""
    template = "https://downloads.haskell.org/~ghc/{ver}/ghc-{ver}-{arch}.tar.xz"
    return template.format(ver = version, arch = arch)
def link_for_sha256_file(version):
    """Return the URL of the SHA256SUMS file for `version`.

    The file contains the hashsums for all arches of that release.
    """
    return "https://downloads.haskell.org/~ghc/%s/SHA256SUMS" % version
def parse_sha256_file(content, version, url):
    """Parse the tarball hashsum file for a distribution version.

    Args:
      content: iterable of lines of a SHA256SUMS file.
      version: one entry of VERSIONS (dict with at least "version").
      url: the URL the file was fetched from, used in error messages.

    Returns:
      dict mapping arch name (e.g. "x86_64-deb8-linux") to sha256 digest.

    Exits the process with status 1 if any non-ignored line cannot be parsed.
    """
    res = {}
    errs = []
    for line in content:
        # Example line:
        # f5763983a26dedd88b65a0b17267359a3981b83a642569b26334423f684f8b8c ./ghc-8.4.3-i386-deb8-linux.tar.xz
        (digest, file_) = line.strip().split(" ./")
        prefix = "ghc-{ver}-".format(ver = version.get("distribution_version", version['version']))
        suffix = ".tar.xz"

        # filter ignored files
        if any([file_.startswith(p) for p in version.get("ignore_prefixes", [])]) \
           or any([file_.endswith(s) for s in version.get("ignore_suffixes", [])]):
            continue

        if file_.startswith(prefix) and file_.endswith(suffix):
            # the arch part, e.g. i386-deb8-linux
            name = file_[len(prefix):-len(suffix)]
            res[name] = digest
        else:
            errs.append("Can't parse the sha256 field for {ver}: {entry}".format(
                ver = version['version'], entry = line.strip()))

    if errs:
        eprint("Errors parsing file at " + url + ". Either fix or ignore the lines (ignore_suffixes/ignore_prefixes).")
        for e in errs:
            eprint(e)
        # Use sys.exit rather than the interactive-only `exit` helper.
        sys.exit(1)

    return res
def eprint(mes):
    """Write a message to standard error."""
    sys.stderr.write(str(mes) + "\n")
# Main.
if __name__ == "__main__":
    # Fetch all hashsum files
    # grab : { version: { arch: sha256 } }
    grab = {}
    for ver in VERSIONS:
        eprint("fetching " + ver['version'])
        url = link_for_sha256_file(ver['version'])
        res = urllib2.urlopen(url)
        if res.getcode() != 200:
            eprint("download of {} failed with status {}".format(url, res.getcode()))
            sys.exit(1)
        else:
            grab[ver['version']] = parse_sha256_file(res, ver, url)

    # check whether any version is missing arches we need
    # errs : { version: set(missing_arches) }
    errs = {}
    for ver, hashes in grab.items():
        real_arches = frozenset(hashes.keys())
        needed_arches = frozenset([a['upstream'] for a in ARCHES])
        missing_arches = needed_arches.difference(real_arches)
        if missing_arches:
            errs[ver] = missing_arches
    if errs:
        for ver, missing in errs.items():
            eprint("version {ver} is missing hashes for required architectures {arches}".format(
                ver = ver,
                arches = missing))
        # Abort here with a clean error; previously execution continued and
        # crashed below with a raw KeyError on the missing arch.
        sys.exit(1)

    # fetch the arches we need and create the GHC_BINDISTS dict
    # ghc_bindists : { version: { bazel_arch: (tarball_url, sha256_hash) } }
    ghc_bindists = {}
    for ver, hashes in grab.items():
        # { bazel_arch: (tarball_url, sha256_hash) }
        arch_dists = {}
        for arch in ARCHES:
            arch_dists[arch['bazel']] = (
                link_for_tarball(arch['upstream'], ver),
                hashes[arch['upstream']]
            )
        ghc_bindists[ver] = arch_dists

    # Print to stdout. Be aware that you can't `> foo.bzl`,
    # because that truncates the source file which is needed
    # for bazel to run in the first place.
    print(""" \
# Generated with `bazel run @io_tweag_rules_haskell//haskell:gen-ghc-bindist | sponge haskell/private/ghc_bindist_generated.bzl`
# To add a version or architecture, edit the constants in haskell/gen_ghc_bindist.py
GHC_BINDIST = \\""")
    pprint.pprint(ghc_bindists)

View file

@ -0,0 +1,83 @@
# BUILD file template for an external GHC distribution repository: exposes the
# GHC binaries, RTS libraries/headers, and (on Windows) the bundled MinGW
# toolchain.
package(default_visibility = ["//visibility:public"])

filegroup(
    name = "bin",
    srcs = glob(["bin/*"]),
)

cc_library(
    name = "threaded-rts",
    srcs = glob(
        ["lib/ghc-*/rts/libHSrts_thr-ghc*." + ext for ext in [
            "so",
            "dylib",
        ]] +
        # dependency of `libHSrts_thr_ghc*`
        # globbing on the `so` version to stay working when they update
        [
            "lib/ghc-*/rts/libffi.so.*",
        ],
    ),
    hdrs = glob(["lib/ghc-*/include/**/*.h"]),
    # There is exactly one lib/ghc-* directory in a GHC distribution, so the
    # glob's first (only) match is the include prefix.
    strip_include_prefix = glob(
        ["lib/ghc-*/include"],
        exclude_directories = 0,
    )[0],
)

# TODO: detect this more automatically.
cc_library(
    name = "unix-includes",
    hdrs = glob(["lib/ghc-*/unix-*/include/*.h"]),
    includes = glob(
        ["lib/ghc-*/unix-*/include"],
        exclude_directories = 0,
    ),
)

# This is needed for Hazel targets.
cc_library(
    name = "rts-headers",
    hdrs = glob([
        "lib/ghc-*/include/**/*.h",
        "lib/include/**/*.h",
    ]),
    includes = glob(
        [
            "lib/ghc-*/include",
            "lib/include",
        ],
        exclude_directories = 0,
    ),
)

# Expose embedded MinGW toolchain when on Windows.

filegroup(
    name = "empty",
    srcs = [],
)

cc_toolchain_suite(
    name = "toolchain",
    toolchains = {
        "x64_windows": ":cc-compiler-mingw64",
        "x64_windows|ghc-mingw-gcc": ":cc-compiler-mingw64",
    },
)

# Keep in sync with @bazel_tools//cpp:cc-compiler-x64_windows definition.
cc_toolchain(
    name = "cc-compiler-mingw64",
    all_files = ":empty",
    ar_files = ":empty",
    as_files = ":empty",
    compiler_files = ":empty",
    cpu = "x64_windows",
    dwp_files = ":empty",
    linker_files = ":empty",
    objcopy_files = ":empty",
    strip_files = ":empty",
    supports_param_files = 0,
    toolchain_identifier = "ghc_windows_mingw64",
)

View file

@ -0,0 +1,408 @@
"""Workspace rules (GHC binary distributions)"""
_GHC_DEFAULT_VERSION = "8.4.4"
# Generated with `bazel run @io_tweag_rules_haskell//haskell:gen-ghc-bindist`
# To add a version or architecture, edit the constants in haskell/gen_ghc_bindist.py,
# regenerate the dict and copy it here.
# Wed like to put this dict into its own file,
# but that triggers a bug in Skydoc unfortunately.
GHC_BINDIST = \
{
"7.10.3": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-x86_64-apple-darwin.tar.xz",
"b7cad2ea7badb7006621105fbf24b4bd364d2e51c1a75661978d9280d68e83a8",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-x86_64-deb8-linux.tar.xz",
"804c75c4635353bf987c1ca120b8531c7bb4957c5b84d29c7adc4894b6fd579d",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-x86_64-unknown-mingw32.tar.xz",
"cc7987ca7ffcd8fc8b999ed8f7408300cd9fef156032338fd57d63f577532b81",
),
},
"8.0.2": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.0.2/ghc-8.0.2-x86_64-apple-darwin.tar.xz",
"ff50a2df9f002f33b9f09717ebf5ec5a47906b9b65cc57b1f9849f8b2e06788d",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.0.2/ghc-8.0.2-x86_64-deb8-linux.tar.xz",
"5ee68290db00ca0b79d57bc3a5bdce470de9ce9da0b098a7ce6c504605856c8f",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.0.2/ghc-8.0.2-x86_64-unknown-mingw32.tar.xz",
"8c42c1f4af995205b9816a1e97e2752fe758544c1f5fe77958cdcd319c9c2d53",
),
},
"8.2.2": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.2.2/ghc-8.2.2-x86_64-apple-darwin.tar.xz",
"f90fcf62f7e0936a6dfc3601cf663729bfe9bbf85097d2d75f0a16f8c2e95c27",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.2.2/ghc-8.2.2-x86_64-deb8-linux.tar.xz",
"48e205c62b9dc1ccf6739a4bc15a71e56dde2f891a9d786a1b115f0286111b2a",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.2.2/ghc-8.2.2-x86_64-unknown-mingw32.tar.xz",
"1e033df2092aa546e763e7be63167720b32df64f76673ea1ce7ae7c9f564b223",
),
},
"8.4.1": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.4.1/ghc-8.4.1-x86_64-apple-darwin.tar.xz",
"d774e39f3a0105843efd06709b214ee332c30203e6c5902dd6ed45e36285f9b7",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.4.1/ghc-8.4.1-x86_64-deb8-linux.tar.xz",
"427c77a934b30c3f1de992c38c072afb4323fe6fb30dbac919ca8cb6ae98fbd9",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.4.1/ghc-8.4.1-x86_64-unknown-mingw32.tar.xz",
"328b013fc651d34e075019107e58bb6c8a578f0155cf3ad4557e6f2661b03131",
),
},
"8.4.2": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.4.2/ghc-8.4.2-x86_64-apple-darwin.tar.xz",
"87469222042b9ac23f9db216a8d4e5107297bdbbb99df71eb4d9e7208455def2",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.4.2/ghc-8.4.2-x86_64-deb8-linux.tar.xz",
"246f66eb56f4ad0f1c7755502cfc8f9972f2d067dede17e151f6f479c1f76fbd",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.4.2/ghc-8.4.2-x86_64-unknown-mingw32.tar.xz",
"797634aa9812fc6b2084a24ddb4fde44fa83a2f59daea82e0af81ca3dd323fde",
),
},
"8.4.3": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.4.3/ghc-8.4.3-x86_64-apple-darwin.tar.xz",
"af0b455f6c46b9802b4b48dad996619cfa27cc6e2bf2ce5532387b4a8c00aa64",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.4.3/ghc-8.4.3-x86_64-deb8-linux.tar.xz",
"30a402c6d4754a6c020e0547f19ae3ac42e907e35349aa932d347f73e421a8e2",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.4.3/ghc-8.4.3-x86_64-unknown-mingw32.tar.xz",
"8a83cfbf9ae84de0443c39c93b931693bdf2a6d4bf163ffb41855f80f4bf883e",
),
},
"8.4.4": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.4.4/ghc-8.4.4-x86_64-apple-darwin.tar.xz",
"28dc89ebd231335337c656f4c5ead2ae2a1acc166aafe74a14f084393c5ef03a",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.4.4/ghc-8.4.4-x86_64-deb8-linux.tar.xz",
"4c2a8857f76b7f3e34ecba0b51015d5cb8b767fe5377a7ec477abde10705ab1a",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.4.4/ghc-8.4.4-x86_64-unknown-mingw32.tar.xz",
"da29dbb0f1199611c7d5bb7b0dd6a7426ca98f67dfd6da1526b033cd3830dc05",
),
},
"8.6.2": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.6.2/ghc-8.6.2-x86_64-apple-darwin.tar.xz",
"8ec46a25872226dd7e5cf7271e3f3450c05f32144b96e6b9cb44cc4079db50dc",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.6.2/ghc-8.6.2-x86_64-deb8-linux.tar.xz",
"13f96e8b83bb5bb60f955786ff9085744c24927a33be8a17773f84c7c248533a",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.6.2/ghc-8.6.2-x86_64-unknown-mingw32.tar.xz",
"9a398e133cab09ff2610834337355d4e26c35e0665403fb9ff8db79315f74d3d",
),
},
"8.6.3": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.6.3/ghc-8.6.3-x86_64-apple-darwin.tar.xz",
"79d069a1a7d74cfdd7ac2a2711c45d3ddc6265b988a0cefa342714b24f997fc1",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.6.3/ghc-8.6.3-x86_64-deb8-linux.tar.xz",
"291ca565374f4d51cc311488581f3279d3167a064fabfd4a6722fe2bd4532fd5",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.6.3/ghc-8.6.3-x86_64-unknown-mingw32.tar.xz",
"2fec383904e5fa79413e9afd328faf9bc700006c8c3d4bcdd8d4f2ccf0f7fa2a",
),
},
"8.6.4": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.6.4/ghc-8.6.4-x86_64-apple-darwin.tar.xz",
"cccb58f142fe41b601d73690809f6089f7715b6a50a09aa3d0104176ab4db09e",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.6.4/ghc-8.6.4-x86_64-deb8-linux.tar.xz",
"34ef5fc8ddf2fc32a027180bea5b1c8a81ea840c87faace2977a572188d4b42d",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.6.4/ghc-8.6.4-x86_64-unknown-mingw32.tar.xz",
"e8d021b7a90772fc559862079da20538498d991956d7557b468ca19ddda22a08",
),
},
"8.6.5": {
"darwin_amd64": (
"https://downloads.haskell.org/~ghc/8.6.5/ghc-8.6.5-x86_64-apple-darwin.tar.xz",
"dfc1bdb1d303a87a8552aa17f5b080e61351f2823c2b99071ec23d0837422169",
),
"linux_amd64": (
"https://downloads.haskell.org/~ghc/8.6.5/ghc-8.6.5-x86_64-deb8-linux.tar.xz",
"c419fd0aa9065fe4d2eb9a248e323860c696ddf3859749ca96a84938aee49107",
),
"windows_amd64": (
"https://downloads.haskell.org/~ghc/8.6.5/ghc-8.6.5-x86_64-unknown-mingw32.tar.xz",
"457024c6ea43bdce340af428d86319931f267089398b859b00efdfe2fd4ce93f",
),
},
}
def _execute_fail_loudly(ctx, args):
    """Run a command through the repository context; abort the build on failure.

    Args:
      ctx: Repository rule context.
      args: Command and its arguments.
    """
    result = ctx.execute(args, quiet = False)
    if result.return_code == 0:
        return
    fail("{0} failed, aborting creation of GHC bindist".format(" ".join(args)))
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "patch")
def _ghc_bindist_impl(ctx):
    """Download, patch and configure one GHC binary distribution.

    Populates the repository with the unpacked bindist, a BUILD file
    instantiated from //haskell:ghc.BUILD, a CROSSTOOL for Windows and
    an empty WORKSPACE marker.
    """

    # Avoid rule restart by resolving these labels early. See
    # https://github.com/bazelbuild/bazel/blob/master/tools/cpp/lib_cc_configure.bzl#L17.
    ghc_build = ctx.path(Label("//haskell:ghc.BUILD"))
    crosstool_windows = ctx.path(Label("//haskell:CROSSTOOL.windows"))

    version = ctx.attr.version
    target = ctx.attr.target
    # target is an "<os>_<arch>" pair, e.g. "linux_amd64".
    os, _, arch = target.partition("_")

    # NOTE(review): the message formats ctx.os.name (the host OS) but
    # the lookup key is `target` — consider reporting `target` instead.
    if GHC_BINDIST[version].get(target) == None:
        fail("Operating system {0} does not have a bindist for GHC version {1}".format(ctx.os.name, ctx.attr.version))
    else:
        url, sha256 = GHC_BINDIST[version][target]

    bindist_dir = ctx.path(".")  # repo path

    ctx.download_and_extract(
        url = url,
        output = ".",
        sha256 = sha256,
        type = "tar.xz",
        stripPrefix = "ghc-" + version,
    )

    # We apply some patches, if needed.
    patch(ctx)

    # As the patches may touch the package DB we regenerate the cache.
    if len(ctx.attr.patches) > 0:
        _execute_fail_loudly(ctx, ["./bin/ghc-pkg", "recache"])

    # On Windows the bindist already contains the built executables
    if os != "windows":
        # Mark the build relocatable, then run the bindist's own
        # configure/make install to finish the in-place installation.
        _execute_fail_loudly(ctx, ["sed", "-i", "s/RelocatableBuild = NO/RelocatableBuild = YES/", "mk/config.mk.in"])
        _execute_fail_loudly(ctx, ["./configure", "--prefix", bindist_dir.realpath])
        _execute_fail_loudly(ctx, ["make", "install"])

        # The wrapper scripts in bin/ have the absolute repo path baked
        # in; rewrite it to a path computed relative to the script so
        # the repository stays relocatable.
        ctx.file("patch_bins", executable = True, content = """#!/usr/bin/env bash
grep -lZ {bindist_dir} bin/* | xargs -0 --verbose \\
    sed -i \\
    -e '2iDISTDIR="$( dirname "$(resolved="$0"; while tmp="$(readlink "$resolved")"; do resolved="$tmp"; done; echo "$resolved")" )/.."' \\
    -e 's:{bindist_dir}:$DISTDIR:'
""".format(
            bindist_dir = bindist_dir.realpath,
        ))
        _execute_fail_loudly(ctx, ["./patch_bins"])

    # Instantiate the static BUILD/CROSSTOOL templates into the repo.
    ctx.template(
        "BUILD",
        ghc_build,
        executable = False,
    )
    ctx.template("CROSSTOOL", crosstool_windows, executable = False)
    ctx.file("WORKSPACE")
# Hidden repository rule that unpacks, patches and configures a single
# GHC binary distribution. Users should go through the `ghc_bindist`
# macro below, which also sets up the companion toolchain repository.
_ghc_bindist = repository_rule(
    _ghc_bindist_impl,
    local = False,
    attrs = {
        "version": attr.string(
            default = _GHC_DEFAULT_VERSION,
            # Restrict to the versions we have checksums for.
            values = GHC_BINDIST.keys(),
            doc = "The desired GHC version",
        ),
        # An "<os>_<arch>" pair such as "linux_amd64"; must be a key of
        # GHC_BINDIST[version].
        "target": attr.string(),
        "patches": attr.label_list(
            default = [],
            doc =
                "A list of files that are to be applied as patches after " +
                "extracting the archive.",
        ),
        "patch_tool": attr.string(
            default = "patch",
            doc = "The patch(1) utility to use.",
        ),
        "patch_args": attr.string_list(
            default = ["-p0"],
            doc = "The arguments given to the patch tool",
        ),
        "patch_cmds": attr.string_list(
            default = [],
            doc = "Sequence of commands to be applied after patches are applied.",
        ),
    },
)
def _ghc_bindist_toolchain_impl(ctx):
    """Generate a BUILD file declaring a `haskell_toolchain` that wraps
    the sibling bindist repository's `//:bin` filegroup.
    """
    os, _, arch = ctx.attr.target.partition("_")

    # Execution-platform constraint derived from the bindist's OS.
    # Bindists run natively, so target constraints equal exec constraints.
    exec_constraints = [{
        "darwin": "@bazel_tools//platforms:osx",
        "linux": "@bazel_tools//platforms:linux",
        "windows": "@bazel_tools//platforms:windows",
    }.get(os)]
    target_constraints = exec_constraints

    ctx.file(
        "BUILD",
        executable = False,
        content = """
load("@io_tweag_rules_haskell//haskell:toolchain.bzl", "haskell_toolchain")

haskell_toolchain(
    name = "toolchain",
    tools = ["{tools}"],
    version = "{version}",
    compiler_flags = {compiler_flags},
    haddock_flags = {haddock_flags},
    repl_ghci_args = {repl_ghci_args},
    exec_compatible_with = {exec_constraints},
    target_compatible_with = {target_constraints},
)
""".format(
            tools = "@{}//:bin".format(ctx.attr.bindist_name),
            version = ctx.attr.version,
            compiler_flags = ctx.attr.compiler_flags,
            haddock_flags = ctx.attr.haddock_flags,
            repl_ghci_args = ctx.attr.repl_ghci_args,
            exec_constraints = exec_constraints,
            target_constraints = target_constraints,
        ),
    )
# Hidden repository rule holding only the toolchain definition for a
# bindist; kept in a separate repository so it can be analyzed without
# downloading the bindist itself (see comment in `ghc_bindist`).
_ghc_bindist_toolchain = repository_rule(
    _ghc_bindist_toolchain_impl,
    local = False,
    attrs = {
        # Name of the `_ghc_bindist` repository whose `//:bin` filegroup
        # the generated toolchain points at.
        "bindist_name": attr.string(),
        "version": attr.string(),
        "compiler_flags": attr.string_list(),
        "haddock_flags": attr.string_list(),
        "repl_ghci_args": attr.string_list(),
        # "<os>_<arch>" pair, e.g. "linux_amd64".
        "target": attr.string(),
    },
)
def ghc_bindist(
        name,
        version,
        target,
        compiler_flags = None,
        haddock_flags = None,
        repl_ghci_args = None):
    """Create a new repository from binary distributions of GHC.

    The repository exports two targets:

    * a `bin` filegroup containing all GHC commands,
    * a `threaded-rts` CC library.

    These targets are unpacked from a binary distribution specific to your
    platform. Only the platforms that have a "binary package" on the GHC
    [download page](https://www.haskell.org/ghc/) are supported.

    Example:
      In `WORKSPACE` file:

      ```bzl
      load("@io_tweag_rules_haskell//haskell:haskell.bzl", "ghc_bindist")

      # This repository rule creates @ghc repository.
      ghc_bindist(
          name = "ghc",
          version = "8.2.2",
      )
      ```
    """
    toolchain_name = "{}-toolchain".format(name)

    # Recent GHC versions on Windows contain a bug:
    # https://gitlab.haskell.org/ghc/ghc/issues/16466
    # We work around this by patching the base configuration.
    patches = None
    if target == "windows_amd64":
        patches = {
            "8.6.2": ["@io_tweag_rules_haskell//haskell:assets/ghc_8_6_2_win_base.patch"],
            "8.6.4": ["@io_tweag_rules_haskell//haskell:assets/ghc_8_6_4_win_base.patch"],
        }.get(version)

    extra_attrs = {"patches": patches, "patch_args": ["-p0"]} if patches else {}

    # The toolchain definition lives in its own repository so that
    # `bazel build //...` does not match it (and e.g. build the Windows
    # toolchain even on Linux), and so that the definition is visible
    # without first downloading the bindist itself.
    _ghc_bindist(
        name = name,
        version = version,
        target = target,
        **extra_attrs
    )
    _ghc_bindist_toolchain(
        name = toolchain_name,
        bindist_name = name,
        version = version,
        target = target,
        compiler_flags = compiler_flags,
        haddock_flags = haddock_flags,
        repl_ghci_args = repl_ghci_args,
    )
    native.register_toolchains("@{}//:toolchain".format(toolchain_name))
def haskell_register_ghc_bindists(
        version,
        compiler_flags = None,
        haddock_flags = None,
        repl_ghci_args = None):
    """Register GHC binary distributions for all platforms as toolchains.

    Toolchains can be used to compile Haskell code. This function
    registers one toolchain for each known binary distribution on all
    platforms of the given GHC version. During the build, one
    toolchain will be selected based on the host and target platforms
    (See [toolchain resolution][toolchain-resolution]).

    [toolchain-resolution]: https://docs.bazel.build/versions/master/toolchains.html#toolchain-resolution
    """
    bindists = GHC_BINDIST.get(version)
    if not bindists:
        fail("Binary distribution of GHC {} not available.".format(version))
    for target in bindists:
        ghc_bindist(
            name = "io_tweag_rules_haskell_ghc_{}".format(target),
            version = version,
            target = target,
            compiler_flags = compiler_flags,
            haddock_flags = haddock_flags,
            repl_ghci_args = repl_ghci_args,
        )

View file

@ -0,0 +1,312 @@
"""Haddock support"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaddockInfo",
"HaskellInfo",
"HaskellLibraryInfo",
)
load(":private/context.bzl", "haskell_context", "render_env")
load(":private/set.bzl", "set")
def _get_haddock_path(package_id):
    """Compute the relative path of a package's Haddock interface file.

    Args:
      package_id: string, package id.

    Returns:
      string: relative path to haddock file.
    """
    return "{0}.haddock".format(package_id)
def _haskell_doc_aspect_impl(target, ctx):
    """Build Haddock documentation for a single Haskell library target.

    Returns a HaddockInfo provider carrying this package's `.haddock`
    interface file and HTML directory merged with those of all
    transitive deps, plus an OutputGroupInfo exposing the HTML output.
    """
    # Only Haskell library targets can be documented.
    if HaskellInfo not in target or HaskellLibraryInfo not in target:
        return []

    # Packages imported via `//haskell:import.bzl%haskell_import` already
    # contain an `HaddockInfo` provider, so we just forward it
    if HaddockInfo in target:
        return []

    hs = haskell_context(ctx, ctx.rule.attr)

    package_id = target[HaskellLibraryInfo].package_id
    html_dir_raw = "doc-{0}".format(package_id)
    html_dir = ctx.actions.declare_directory(html_dir_raw)
    haddock_file = ctx.actions.declare_file(_get_haddock_path(package_id))

    # XXX Haddock really wants a version number, so invent one from
    # thin air. See https://github.com/haskell/haddock/issues/898.
    if target[HaskellLibraryInfo].version:
        version = target[HaskellLibraryInfo].version
    else:
        version = "0"

    # Main Haddock argument list: output locations and rendering flags.
    args = ctx.actions.args()
    args.add("--package-name={0}".format(package_id))
    args.add("--package-version={0}".format(version))
    args.add_all([
        "-D",
        haddock_file.path,
        "-o",
        html_dir.path,
        "--html",
        "--hoogle",
        "--title={0}".format(package_id),
        "--hyperlinked-source",
    ])

    # Merge interface files and HTML dirs of direct deps (which already
    # carry their own transitive closures) so cross-package links work.
    transitive_haddocks = {}
    transitive_html = {}
    for dep in ctx.rule.attr.deps:
        if HaddockInfo in dep:
            transitive_haddocks.update(dep[HaddockInfo].transitive_haddocks)
            transitive_html.update(dep[HaddockInfo].transitive_html)

    for pid in transitive_haddocks:
        args.add("--read-interface=../{0},{1}".format(
            pid,
            transitive_haddocks[pid].path,
        ))

    # Names of prebuilt (toolchain) packages, passed via a param file.
    prebuilt_deps = ctx.actions.args()
    for dep in set.to_list(target[HaskellInfo].prebuilt_dependencies):
        prebuilt_deps.add(dep.package)
    prebuilt_deps.use_param_file(param_file_arg = "%s", use_always = True)

    # Forward the library's GHC compile flags to Haddock via --optghc.
    compile_flags = ctx.actions.args()
    for x in target[HaskellInfo].compile_flags:
        compile_flags.add_all(["--optghc", x])
    compile_flags.add_all([x.path for x in set.to_list(target[HaskellInfo].source_files)])
    compile_flags.add("-v0")

    # haddock flags should take precedence over ghc args, hence are in
    # last position
    compile_flags.add_all(hs.toolchain.haddock_flags)

    locale_archive_depset = (
        depset([hs.toolchain.locale_archive]) if hs.toolchain.locale_archive != None else depset()
    )

    # TODO(mboes): we should be able to instantiate this template only
    # once per toolchain instance, rather than here.
    haddock_wrapper = ctx.actions.declare_file("haddock_wrapper-{}".format(hs.name))
    ctx.actions.expand_template(
        template = ctx.file._haddock_wrapper_tpl,
        output = haddock_wrapper,
        substitutions = {
            "%{ghc-pkg}": hs.tools.ghc_pkg.path,
            "%{haddock}": hs.tools.haddock.path,
            # XXX Workaround
            # https://github.com/bazelbuild/bazel/issues/5980.
            "%{env}": render_env(hs.env),
        },
        is_executable = True,
    )

    # Transitive library dependencies for runtime.
    trans_link_ctx = target[HaskellInfo].transitive_cc_dependencies.dynamic_linking
    trans_libs = trans_link_ctx.libraries_to_link.to_list()

    ctx.actions.run(
        inputs = depset(transitive = [
            set.to_depset(target[HaskellInfo].package_databases),
            set.to_depset(target[HaskellInfo].interface_dirs),
            set.to_depset(target[HaskellInfo].source_files),
            target[HaskellInfo].extra_source_files,
            set.to_depset(target[HaskellInfo].dynamic_libraries),
            depset(trans_libs),
            depset(transitive_haddocks.values()),
            depset(transitive_html.values()),
            target[CcInfo].compilation_context.headers,
            depset([
                hs.tools.ghc_pkg,
                hs.tools.haddock,
            ]),
            locale_archive_depset,
        ]),
        outputs = [haddock_file, html_dir],
        mnemonic = "HaskellHaddock",
        progress_message = "HaskellHaddock {}".format(ctx.label),
        executable = haddock_wrapper,
        arguments = [
            prebuilt_deps,
            args,
            compile_flags,
        ],
        use_default_shell_env = True,
    )

    # Extend the transitive maps with this package's own outputs before
    # handing them to reverse dependencies.
    transitive_html.update({package_id: html_dir})
    transitive_haddocks.update({package_id: haddock_file})

    haddock_info = HaddockInfo(
        package_id = package_id,
        transitive_html = transitive_html,
        transitive_haddocks = transitive_haddocks,
    )
    output_files = OutputGroupInfo(default = transitive_html.values())

    return [haddock_info, output_files]
# Aspect that walks the `deps` edges and builds Haddock documentation
# for every Haskell library it visits.
haskell_doc_aspect = aspect(
    _haskell_doc_aspect_impl,
    attrs = {
        # Shell template wrapping the haddock executable (see
        # //haskell:private/haddock_wrapper.sh.tpl).
        "_haddock_wrapper_tpl": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:private/haddock_wrapper.sh.tpl"),
        ),
    },
    attr_aspects = ["deps"],
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)
def _haskell_doc_rule_impl(ctx):
    """Collect per-package Haddock output from `deps` (produced by
    haskell_doc_aspect), copy it under a single documentation root and
    generate a unified index with one extra Haddock invocation.
    """
    hs = haskell_context(ctx)

    # Reject cases when number of dependencies is 0.
    if not ctx.attr.deps:
        fail("haskell_doc needs at least one haskell_library component in deps")

    doc_root_raw = ctx.attr.name
    haddock_dict = {}
    html_dict_original = {}
    all_caches = set.empty()

    # Merge documentation outputs and package databases of all deps.
    for dep in ctx.attr.deps:
        if HaddockInfo in dep:
            html_dict_original.update(dep[HaddockInfo].transitive_html)
            haddock_dict.update(dep[HaddockInfo].transitive_haddocks)
        if HaskellInfo in dep:
            set.mutable_union(
                all_caches,
                dep[HaskellInfo].package_databases,
            )

    # Copy docs of Bazel deps into predefined locations under the root doc
    # directory.
    html_dict_copied = {}
    doc_root_path = ""

    for package_id in html_dict_original:
        html_dir = html_dict_original[package_id]
        output_dir = ctx.actions.declare_directory(
            paths.join(
                doc_root_raw,
                package_id,
            ),
        )
        # All copies share the same parent, so the last assignment is
        # also the common documentation root.
        doc_root_path = paths.dirname(output_dir.path)

        html_dict_copied[package_id] = output_dir

        ctx.actions.run_shell(
            inputs = [html_dir],
            outputs = [output_dir],
            command = """
mkdir -p "{doc_dir}"
# Copy Haddocks of a dependency.
cp -R -L "{html_dir}/." "{target_dir}"
""".format(
                doc_dir = doc_root_path,
                html_dir = html_dir.path,
                target_dir = output_dir.path,
            ),
        )

    # Do one more Haddock call to generate the unified index
    index_root_raw = paths.join(doc_root_raw, "index")
    index_root = ctx.actions.declare_directory(index_root_raw)

    args = ctx.actions.args()
    args.add_all([
        "-o",
        index_root.path,
        "--title={0}".format(ctx.attr.name),
        "--gen-index",
        "--gen-contents",
    ])

    if ctx.attr.index_transitive_deps:
        # Include all packages in the unified index.
        for package_id in html_dict_copied:
            args.add("--read-interface=../{0},{1}".format(
                package_id,
                haddock_dict[package_id].path,
            ))
    else:
        # Include only direct dependencies.
        for dep in ctx.attr.deps:
            if HaddockInfo in dep:
                package_id = dep[HaddockInfo].package_id
                args.add("--read-interface=../{0},{1}".format(
                    package_id,
                    haddock_dict[package_id].path,
                ))

    # Point the GHC invoked by Haddock at every collected package DB.
    for cache in set.to_list(all_caches):
        args.add("--optghc=-package-db={0}".format(cache.dirname))

    locale_archive_depset = (
        depset([hs.toolchain.locale_archive]) if hs.toolchain.locale_archive != None else depset()
    )

    ctx.actions.run(
        inputs = depset(transitive = [
            set.to_depset(all_caches),
            depset(html_dict_copied.values()),
            depset(haddock_dict.values()),
            locale_archive_depset,
        ]),
        outputs = [index_root],
        mnemonic = "HaskellHaddockIndex",
        executable = hs.tools.haddock,
        arguments = [args],
    )

    return [DefaultInfo(
        files = depset(html_dict_copied.values() + [index_root]),
    )]
# Rule: generate unified Haddock documentation for a set of libraries.
haskell_doc = rule(
    _haskell_doc_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [haskell_doc_aspect],
            doc = "List of Haskell libraries to generate documentation for.",
        ),
        "index_transitive_deps": attr.bool(
            default = False,
            doc = "Whether to include documentation of transitive dependencies in index.",
        ),
    },
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)

"""Create API documentation.

Builds API documentation (using [Haddock][haddock]) for the given
Haskell libraries. It will automatically build documentation for any
transitive dependencies to allow for cross-package documentation
linking.

Example:
  ```bzl
  haskell_library(
      name = "my-lib",
      ...
  )

  haskell_doc(
      name = "my-lib-doc",
      deps = [":my-lib"],
  )
  ```

[haddock]: http://haskell-haddock.readthedocs.io/en/latest/
"""

View file

@ -0,0 +1,353 @@
"""Core Haskell rules"""
load(
":cc.bzl",
_cc_haskell_import = "cc_haskell_import",
_haskell_cc_import = "haskell_cc_import",
)
load(
":doctest.bzl",
_haskell_doctest = "haskell_doctest",
_haskell_doctest_toolchain = "haskell_doctest_toolchain",
)
load(
":ghc_bindist.bzl",
_ghc_bindist = "ghc_bindist",
_haskell_register_ghc_bindists = "haskell_register_ghc_bindists",
)
load(
":haddock.bzl",
_haskell_doc = "haskell_doc",
_haskell_doc_aspect = "haskell_doc_aspect",
)
load(
":lint.bzl",
_haskell_lint = "haskell_lint",
_haskell_lint_aspect = "haskell_lint_aspect",
)
load(
":private/haskell_impl.bzl",
_haskell_binary_impl = "haskell_binary_impl",
_haskell_library_impl = "haskell_library_impl",
_haskell_test_impl = "haskell_test_impl",
_haskell_toolchain_library_impl = "haskell_toolchain_library_impl",
)
load(
":repl.bzl",
_haskell_repl = "haskell_repl",
_haskell_repl_aspect = "haskell_repl_aspect",
)
# For re-exports:
load(
":protobuf.bzl",
_haskell_proto_library = "haskell_proto_library",
_haskell_proto_toolchain = "haskell_proto_toolchain",
)
load(
":toolchain.bzl",
_haskell_register_toolchains = "haskell_register_toolchains",
_haskell_toolchain = "haskell_toolchain",
)
load(
":plugins.bzl",
_ghc_plugin = "ghc_plugin",
)
# Attributes shared by all Haskell compilation rules (binaries, tests
# and libraries). `_`-prefixed attributes are implicit tool and script
# dependencies and are not settable by users.
_haskell_common_attrs = {
    "src_strip_prefix": attr.string(
        doc = "Directory in which module hierarchy starts.",
    ),
    "srcs": attr.label_list(
        allow_files = [".hs", ".hsc", ".lhs", ".hs-boot", ".lhs-boot", ".h"],
        doc = "Haskell source files.",
    ),
    "extra_srcs": attr.label_list(
        allow_files = True,
        doc = "Extra (non-Haskell) source files that will be needed at compile time (e.g. by Template Haskell).",
    ),
    "deps": attr.label_list(
        doc = "List of other Haskell libraries to be linked to this target.",
    ),
    "data": attr.label_list(
        doc = "See [Bazel documentation](https://docs.bazel.build/versions/master/be/common-definitions.html#common.data).",
        allow_files = True,
    ),
    "compiler_flags": attr.string_list(
        doc = "Flags to pass to Haskell compiler.",
    ),
    "repl_ghci_args": attr.string_list(
        doc = "Arbitrary extra arguments to pass to GHCi. This extends `compiler_flags` and `repl_ghci_args` from the toolchain",
    ),
    "runcompile_flags": attr.string_list(
        doc = "Arbitrary extra arguments to pass to runghc. This extends `compiler_flags` and `repl_ghci_args` from the toolchain",
    ),
    "plugins": attr.label_list(
        doc = "Compiler plugins to use during compilation.",
    ),
    # Default GHCi startup script used by the @repl output targets.
    "_ghci_script": attr.label(
        allow_single_file = True,
        default = Label("@io_tweag_rules_haskell//haskell:assets/ghci_script"),
    ),
    # Shell wrapper that sets up the environment before launching GHCi.
    "_ghci_repl_wrapper": attr.label(
        allow_single_file = True,
        default = Label("@io_tweag_rules_haskell//haskell:private/ghci_repl_wrapper.sh"),
    ),
    # Helper binary //haskell:ls_modules (lists modules of a library).
    "_ls_modules": attr.label(
        executable = True,
        cfg = "host",
        default = Label("@io_tweag_rules_haskell//haskell:ls_modules"),
    ),
    # Helper binary //haskell:version_macros (generates CPP version macros).
    "_version_macros": attr.label(
        executable = True,
        cfg = "host",
        default = Label("@io_tweag_rules_haskell//haskell:version_macros"),
    ),
    "_cc_toolchain": attr.label(
        default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
    ),
}
def _mk_binary_rule(**kwargs):
    """Generate a rule that compiles a binary.

    This is useful to create variations of a Haskell binary compilation
    rule without having to copy and paste the actual `rule` invocation.

    Args:
      **kwargs: Any additional keyword arguments to pass to `rule`.

    Returns:
      Rule: Haskell binary compilation rule.
    """
    is_test = kwargs.get("test", False)

    # Binary-specific attributes added on top of the common Haskell ones.
    attrs = dict(
        _haskell_common_attrs,
        linkstatic = attr.bool(
            default = True,
            doc = "Link dependencies statically wherever possible. Some system libraries may still be linked dynamically, as are libraries for which there is no static library. So the resulting executable will still be dynamically linked, hence only mostly static.",
        ),
        main_function = attr.string(
            default = "Main.main",
            doc = """A function with type `IO _`, either the qualified name of a function from any module or the bare name of a function from a `Main` module. It is also possible to give the qualified name of any module exposing a `main` function.""",
        ),
        version = attr.string(
            doc = "Executable version. If this is specified, CPP version macros will be generated for this build.",
        ),
    )

    # Tests have an extra fields regarding code coverage.
    if is_test:
        attrs.update({
            "expected_covered_expressions_percentage": attr.int(
                default = -1,
                doc = "The expected percentage of expressions covered by testing.",
            ),
            "expected_uncovered_expression_count": attr.int(
                default = -1,
                doc = "The expected number of expressions which are not covered by testing.",
            ),
            "strict_coverage_analysis": attr.bool(
                default = False,
                doc = "Requires that the coverage metric is matched exactly, even doing better than expected is not allowed.",
            ),
            "coverage_report_format": attr.string(
                default = "text",
                doc = """The format to output the coverage report in. Supported values: "text", "html". Default: "text".
                Report can be seen in the testlog XML file, or by setting --test_output=all when running bazel coverage.
                """,
            ),
            "experimental_coverage_source_patterns": attr.string_list(
                default = ["//..."],
                doc = """The path patterns specifying which targets to analyze for test coverage metrics.

                Wild-card targets such as //... or //:all are allowed. The paths must be relative to the workspace, which means they must start with "//".

                Note, this attribute may leave experimental status depending on the outcome of https://github.com/bazelbuild/bazel/issues/7763.
                """,
            ),
            "_coverage_wrapper_template": attr.label(
                allow_single_file = True,
                default = Label("@io_tweag_rules_haskell//haskell:private/coverage_wrapper.sh.tpl"),
            ),
            "_bash_runfiles": attr.label(
                allow_single_file = True,
                default = Label("@bazel_tools//tools/bash/runfiles:runfiles"),
            ),
        })

    return rule(
        # If _mk_binary_rule was called with test = True, we want to use the test binary implementation
        _haskell_test_impl if is_test else _haskell_binary_impl,
        executable = True,
        attrs = attrs,
        # Convenience outputs: `name@runghc`, `name@repl`, and the
        # deprecated `name-repl` alias.
        outputs = {
            "runghc": "%{name}@runghc",
            "repl": "%{name}@repl",
            "repl_deprecated": "%{name}-repl",
        },
        toolchains = [
            "@io_tweag_rules_haskell//haskell:toolchain",
        ],
        **kwargs
    )
# Test rule: a haskell_binary with coverage attributes and Bazel's
# common test attributes mixed in.
haskell_test = _mk_binary_rule(test = True)

"""Build a test suite.

Additionally, it accepts [all common bazel test rule
fields][bazel-test-attrs]. This allows you to influence things like
timeout and resource allocation for the test.

[bazel-test-attrs]: https://docs.bazel.build/versions/master/be/common-definitions.html#common-attributes-tests
"""

haskell_binary = _mk_binary_rule()

"""Build an executable from Haskell source.

Example:
  ```bzl
  haskell_binary(
      name = "hello",
      srcs = ["Main.hs", "Other.hs"],
      deps = ["//lib:some_lib"]
  )
  ```

Every `haskell_binary` target also defines an optional REPL target that is
not built by default, but can be built on request. The name of the REPL
target is the same as the name of binary with `"@repl"` added at the end.
For example, the target above also defines `main@repl`.

You can call the REPL like this (requires Bazel 0.15 or later):

```
$ bazel run //:hello@repl
```
"""
# Rule: compile a Haskell library and register it in a package database.
haskell_library = rule(
    _haskell_library_impl,
    attrs = dict(
        _haskell_common_attrs,
        hidden_modules = attr.string_list(
            doc = "Modules that should be unavailable for import by dependencies.",
        ),
        exports = attr.label_keyed_string_dict(
            doc = "A dictionary mapping dependencies to module reexports that should be available for import by dependencies.",
        ),
        linkstatic = attr.bool(
            default = False,
            doc = "Create a static library, not both a static and a shared library.",
        ),
        version = attr.string(
            doc = """Library version. Not normally necessary unless to build a library
            originally defined as a Cabal package. If this is specified, CPP version macro will be generated.""",
        ),
    ),
    # Same convenience outputs as the binary rules.
    outputs = {
        "runghc": "%{name}@runghc",
        "repl": "%{name}@repl",
        "repl_deprecated": "%{name}-repl",
    },
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
    ],
)

"""Build a library from Haskell source.

Example:
  ```bzl
  haskell_library(
      name = "hello-lib",
      srcs = glob(["src/**/*.hs"]),
      src_strip_prefix = "src",
      deps = [
          "//hello-sublib:lib",
      ],
      exports = {
          "//hello-sublib:lib": "Lib1 as HelloLib1, Lib2",
      },
  )
  ```

Every `haskell_library` target also defines an optional REPL target that is
not built by default, but can be built on request. It works the same way as
for `haskell_binary`.
"""
# Rule: expose a package shipped with the GHC toolchain (e.g. `base`)
# as an ordinary Bazel target.
haskell_toolchain_library = rule(
    _haskell_toolchain_library_impl,
    attrs = dict(
        package = attr.string(
            doc = "The name of a GHC package not built by Bazel. Defaults to the name of the rule.",
        ),
        # Helper binary //haskell:version_macros (generates CPP version macros).
        _version_macros = attr.label(
            executable = True,
            cfg = "host",
            default = Label("@io_tweag_rules_haskell//haskell:version_macros"),
        ),
    ),
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
    ],
)

"""Import packages that are prebuilt outside of Bazel.

Example:
  ```bzl
  haskell_toolchain_library(
      name = "base_pkg",
      package = "base",
  )

  haskell_library(
      name = "hello-lib",
      srcs = ["Lib.hs"],
      deps = [
          ":base_pkg",
          "//hello-sublib:lib",
      ],
  )
  ```

Use this rule to make dependencies that are prebuilt (supplied as part
of the compiler toolchain) available as targets.
"""
# Public re-exports. This file is the public entry point of
# rules_haskell, so the definitions loaded privately above are exposed
# here under their public names.
haskell_doc = _haskell_doc
haskell_doc_aspect = _haskell_doc_aspect
haskell_lint = _haskell_lint
haskell_lint_aspect = _haskell_lint_aspect
haskell_doctest = _haskell_doctest
haskell_doctest_toolchain = _haskell_doctest_toolchain
haskell_register_toolchains = _haskell_register_toolchains
haskell_register_ghc_bindists = _haskell_register_ghc_bindists
haskell_repl = _haskell_repl
haskell_repl_aspect = _haskell_repl_aspect
haskell_toolchain = _haskell_toolchain
haskell_proto_library = _haskell_proto_library
haskell_proto_toolchain = _haskell_proto_toolchain
ghc_bindist = _ghc_bindist
haskell_cc_import = _haskell_cc_import
cc_haskell_import = _cc_haskell_import
ghc_plugin = _ghc_plugin

View file

@ -0,0 +1,118 @@
"""Importing prebuilt packages into bazel"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaddockInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"empty_HaskellCcInfo",
)
load(":private/context.bzl", "haskell_context")
load(":private/path_utils.bzl", "copy_all", "link_forest", "ln")
load(":private/set.bzl", "set")
load(":private/version_macros.bzl", "generate_version_macros")
def _haskell_import_impl(ctx):
    """Implementation of the `haskell_import` rule.

    Makes a package built outside of Bazel (e.g. by Nix) usable as a Haskell
    dependency: links the supplied package.conf files into a fresh package
    database, recaches it with ghc-pkg, and forwards build, library and
    haddock providers.

    Args:
      ctx: Rule context.

    Returns:
      [HaskellInfo, HaskellLibraryInfo, HaddockInfo]
    """
    hs = haskell_context(ctx)

    # Build a local package database next to the linked .conf files and
    # let ghc-pkg produce the binary package.cache for it.
    package_cache = ctx.actions.declare_file(
        paths.join("package.conf.d", "package.cache"),
    )
    local_package_confs = link_forest(
        ctx = ctx,
        srcs = ctx.attr.package_confs.files,
        sibling = package_cache,
    )
    local_haddock_html = ctx.actions.declare_directory("haddock-html")
    copy_all(
        ctx = ctx,
        srcs = ctx.attr.haddock_html.files,
        dest = local_haddock_html,
    )
    ctx.actions.run(
        outputs = [package_cache],
        inputs = local_package_confs,
        mnemonic = "HaskellCreatePackageCache",
        executable = hs.tools.ghc_pkg,
        arguments = [
            "recache",
            "--package-db",
            package_cache.dirname,
        ],
    )
    ln(ctx, package_cache, ctx.outputs.cache)

    # Collect this package's database together with those of all deps.
    dependencies_caches = set.singleton(package_cache)
    for dep in ctx.attr.deps:
        if HaskellInfo in dep:
            set.mutable_union(dependencies_caches, dep[HaskellInfo].package_databases)
    deps_ids = [
        dep[HaskellLibraryInfo].package_id
        for dep in ctx.attr.deps
        if HaskellLibraryInfo in dep
    ]

    # BUGFIX: `version` is an attr.string, which defaults to "" and is never
    # None, so the previous `!= None` test was always true and version
    # macros were generated even when no version was supplied. Use a
    # truthiness test so the empty string is treated as "no version".
    version_macros = set.empty()
    if ctx.attr.version:
        version_macros = set.singleton(
            generate_version_macros(ctx, hs.name, ctx.attr.version),
        )
    libInfo = HaskellLibraryInfo(
        package_id = ctx.attr.package_id,
        version = ctx.attr.version,
    )
    buildInfo = HaskellInfo(
        package_ids = set.from_list([ctx.attr.package_id] + deps_ids),
        package_databases = dependencies_caches,
        version_macros = version_macros,
        import_dirs = [],
        source_files = set.empty(),
        extra_source_files = set.empty(),
        static_libraries = [],
        static_libraries_prof = [],
        dynamic_libraries = set.empty(),
        interface_dirs = set.empty(),
        compile_flags = [],
        prebuilt_dependencies = set.empty(),
        direct_prebuilt_deps = set.empty(),
        cc_dependencies = empty_HaskellCcInfo(),
        transitive_cc_dependencies = empty_HaskellCcInfo(),
    )

    # Only advertise haddock outputs that were actually provided.
    html_files = ctx.attr.haddock_html.files.to_list()
    transitive_html = {ctx.attr.package_id: local_haddock_html} if html_files != [] else {}
    interface_files = ctx.attr.haddock_interfaces.files.to_list()
    transitive_haddocks = {ctx.attr.package_id: interface_files[0]} if interface_files != [] else {}
    haddockInfo = HaddockInfo(
        package_id = ctx.attr.package_id,
        transitive_html = transitive_html,
        transitive_haddocks = transitive_haddocks,
    )
    return [buildInfo, libInfo, haddockInfo]
haskell_import = rule(
    _haskell_import_impl,
    attrs = dict(
        package_id = attr.string(doc = "Workspace unique package identifier"),
        deps = attr.label_list(doc = "Haskell dependencies for the package"),
        version = attr.string(doc = "Package version."),
        haddock_interfaces = attr.label(doc = "List of haddock interfaces"),
        haddock_html = attr.label(doc = "List of haddock html dirs"),
        package_confs = attr.label(doc = "List of ghc-pkg package.conf files"),
        # Helper binary used to generate CPP version macros (see
        # //haskell:version_macros).
        _version_macros = attr.label(
            executable = True,
            cfg = "host",
            default = Label("@io_tweag_rules_haskell//haskell:version_macros"),
        ),
    ),
    # Predeclared output: the recached ghc-pkg package.cache file.
    outputs = {
        "cache": "%{name}-cache",
    },
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)

View file

@ -0,0 +1,137 @@
"""Linting"""
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellLintInfo",
)
load(":private/context.bzl", "haskell_context", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"target_unique_name",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load(":private/set.bzl", "set")
def _collect_lint_logs(deps):
    """Gather the lint log outputs of every dep that carries HaskellLintInfo."""
    logs = set.empty()
    for dep in deps:
        if HaskellLintInfo not in dep:
            continue
        set.mutable_union(logs, dep[HaskellLintInfo].outputs)
    return logs
def _haskell_lint_rule_impl(ctx):
    """Expose the transitive lint logs of `deps` as this rule's default files."""
    lint_logs = _collect_lint_logs(ctx.attr.deps)
    return [DefaultInfo(files = set.to_depset(lint_logs))]
def _haskell_lint_aspect_impl(target, ctx):
    """Aspect implementation: re-typecheck `target` with a strict warning set.

    Runs GHC in --make mode with -fno-code, so no objects are produced; all
    compiler output is captured in a per-target log file. With -Werror, any
    warning makes the action (and thus the lint) fail.
    """
    hs = haskell_context(ctx, ctx.rule.attr)

    # Non-Haskell targets reached by the aspect are skipped silently.
    if HaskellInfo not in target:
        return []

    hs_info = target[HaskellInfo]
    lib_info = target[HaskellLibraryInfo] if HaskellLibraryInfo in target else None
    args = ctx.actions.args()
    args.add_all([
        "-O0",
        "-v0",
        "-fno-code",
        "-Wall",
        "-Werror",
        "-Wcompat",
        "-Wincomplete-record-updates",
        "-Wincomplete-uni-patterns",
        "-Wredundant-constraints",
        "-Wnoncanonical-monad-instances",
        "--make",
    ])
    args.add_all(pkg_info_to_compile_flags(expose_packages(
        hs_info,
        lib_info,
        use_direct = False,
        use_my_pkg_id = None,
        custom_package_databases = None,
        version = ctx.rule.attr.version,
    )))
    sources = set.to_list(hs_info.source_files)
    args.add_all(sources)
    lint_log = ctx.actions.declare_file(
        target_unique_name(hs, "lint-log"),
    )

    # Transitive library dependencies for runtime.
    (library_deps, ld_library_deps, _ghc_env) = get_libs_for_ghc_linker(
        hs,
        hs_info.transitive_cc_dependencies,
    )
    ctx.actions.run_shell(
        inputs = depset(transitive = [
            depset(sources),
            set.to_depset(hs_info.package_databases),
            set.to_depset(hs_info.interface_dirs),
            set.to_depset(hs_info.dynamic_libraries),
            depset(library_deps),
            depset(ld_library_deps),
            depset([hs.tools.ghc]),
        ]),
        outputs = [lint_log],
        mnemonic = "HaskellLint",
        progress_message = "HaskellLint {}".format(ctx.label),
        # On GHC failure the log is echoed and GHC's exit code propagated;
        # on success the log stays silent.
        command = """
        {env}
        {ghc} "$@" > {output} 2>&1 || rc=$? && cat {output} && exit $rc
        """.format(
            ghc = hs.tools.ghc.path,
            output = lint_log.path,
            # XXX Workaround
            # https://github.com/bazelbuild/bazel/issues/5980.
            env = render_env(hs.env),
        ),
        arguments = [args],
        use_default_shell_env = True,
    )
    lint_info = HaskellLintInfo(outputs = set.singleton(lint_log))
    output_files = OutputGroupInfo(default = [lint_log])
    return [lint_info, output_files]
# Aspect that lints each target in the `deps` closure; propagated along
# the "deps" attribute edges.
haskell_lint_aspect = aspect(
    _haskell_lint_aspect_impl,
    attr_aspects = ["deps"],
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)

haskell_lint = rule(
    _haskell_lint_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [haskell_lint_aspect],
            doc = "List of Haskell targets to lint.",
        ),
    },
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)

"""Check source code of targets in `deps` using a restrictive set of GHC
flags.

The following flags will be used:

* `-Wall`
* `-Werror`
* `-Wcompat`
* `-Wincomplete-record-updates`
* `-Wincomplete-uni-patterns`
* `-Wredundant-constraints`
"""

View file

@ -0,0 +1,119 @@
/**
Generate a bazel-friendly nix package containing
- The haskell package itself
- Its documentation
- A bazel file ready to be loaded from the `BUILD` file and containing the
  right call to `haskell_import`
*/
{ runCommand, lib, writeTextDir, symlinkJoin }:

let
  /* Generate the BUILD file for the package.

     Queries ghc-pkg for the package's metadata (id, version, depends,
     haddock locations, package.conf) and renders a BUILD.bzl exposing a
     `targets()` macro that declares the haskell_import plus filegroups.
  */
  genBuildFile =
    { package_name, package, ghc }:
    runCommand "${package_name}-BUILD" {
      preferLocalBuild = true;
      allowSubstitutes = false;
      # ghc-pkg invocation used by all the query_* helpers below.
      ghc_pkg = "${ghc}/bin/ghc-pkg --simple-output -v0";
      GHC_PACKAGE_PATH = "${package}/lib/${ghc.name}/package.conf.d";
      inherit package_name;
    } ''
      query_field () {
        $ghc_pkg field ${package_name} "$@"
      }

      query_haddock () {
        echo -n '['
        for FIELD in $(query_field "$@"); do
          echo -n "\"$(echo "$FIELD" | cut -d/ -f5-)*\","
          echo -n "\"$(echo "$FIELD" | cut -d/ -f5-)/*\","
        done
        echo -n ']'
      }

      query_list () {
        echo -n '['
        for FIELD in $(query_field "$@"); do
          echo -n '"'
          echo -n $(echo "$FIELD" | cut -d/ -f5-)
          echo -n '",'
        done
        echo -n ']'
      }

      get_deps () {
        echo -n '['
        for DEP in $(query_field depends); do
          DEPNAME=$(echo $DEP | sed 's/-[0-9].*//')
          # Because of cabal's "internal libraries", we may have a package
          # apparently depending on itself, so we have to filter out this
          # corner-case (see
          # https://github.com/tweag/rules_haskell/pull/442#discussion_r219859467)
          if [[ -n $DEPNAME && $DEPNAME != $(query_field name) ]]; then
            echo -n "\"@hackage-$DEPNAME\","
          fi
        done
        echo -n ']'
      }

      mkdir -p $out
      cat <<EOF > $out/BUILD.bzl
      load("@io_tweag_rules_haskell//haskell:import.bzl", haskell_import_new = "haskell_import")
      deps_repos = $(get_deps)

      def targets():
        haskell_import_new(
            name = "pkg",
            deps = [ dep + "//:pkg" for dep in deps_repos],
            package_id = "$(query_field id)",
            version = "$(query_field version)",
            package_confs = "//:package_conf",
            haddock_interfaces = "//:interfaces",
            haddock_html = "//:html",
        )

        native.filegroup(
            name = "html",
            srcs = native.glob($(query_haddock haddock-html), exclude_directories=1),
        )
        native.filegroup(
            name = "interfaces",
            srcs = native.glob($(query_haddock haddock-interfaces), exclude_directories=0),
        )
        native.filegroup(
            name = "bin",
            srcs = native.glob(["bin/*"]),
        )
        native.filegroup(
            name = "package_conf",
            srcs = native.glob(["lib*/${ghc.name}/package.conf.d/$(query_field name)*.conf"]),
        )
      EOF
    '';

  # Map genBuildFile over a whole haskell package set, joining each package
  # with its docs and generated BUILD file, and add a `packageNames`
  # derivation listing every package for later import from Bazel.
  genAllBuilds = pkgSet:
    let newSet =
      lib.mapAttrs (package_name: package:
        let
          # Some nix packages are actually `null` because the haskell package is
          # bundled with ghc (so it doesn't have a custom derivation of its own).
          # For these, we simply pass the ghc derivation instead of theirs.
          real_package = if builtins.isNull package then pkgSet.ghc else package;
          buildFile = genBuildFile {
            inherit (pkgSet) ghc;
            inherit package_name;
            package = real_package;
          };
        in
        symlinkJoin {
          name = package_name + "-bazel";
          paths = [ real_package (real_package.doc or null) buildFile ];
        }
      )
      pkgSet;
    in
    newSet // {
      packageNames = writeTextDir
        "all-haskell-packages.bzl"
        ("packages =" + builtins.toJSON (builtins.attrNames newSet));
    };
in
genAllBuilds

View file

@ -0,0 +1,354 @@
"""Workspace rules (Nixpkgs)"""
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load(
"@io_tweag_rules_nixpkgs//nixpkgs:nixpkgs.bzl",
"nixpkgs_package",
)
def haskell_nixpkgs_package(
        name,
        attribute_path,
        nix_file_deps = [],
        repositories = {},
        build_file_content = None,
        build_file = None,
        **kwargs):
    """Load a single haskell package.

    The package is expected to be in the form of the packages generated by
    `genBazelBuild.nix`.

    Args:
      name: Name of the external repository to create.
      attribute_path: Nix attribute path of the package.
      nix_file_deps: Extra files the nix expression depends on.
      repositories: Nix repositories to resolve `<...>` lookups in.
      build_file_content: Inline BUILD file content (takes precedence).
      build_file: Label of a BUILD file to use instead.
      **kwargs: Forwarded to `nixpkgs_package`.
    """
    repositories = dicts.add(
        {"bazel_haskell_wrapper": "@io_tweag_rules_haskell//haskell:nix/default.nix"},
        repositories,
    )
    nixpkgs_args = dict(
        name = name,
        attribute_path = attribute_path,
        nix_file_deps = nix_file_deps + ["@io_tweag_rules_haskell//haskell:nix/default.nix"],
        repositories = repositories,
        **kwargs
    )

    # BUGFIX: previously `build_file_content` was also put into the initial
    # dict above, so a `None` value could be passed explicitly alongside
    # `build_file`, and a non-None value was set twice. Set exactly one of
    # the two keys here instead.
    if build_file_content:
        nixpkgs_args["build_file_content"] = build_file_content
    elif build_file:
        nixpkgs_args["build_file"] = build_file
    else:
        # Default: delegate target declaration to the BUILD.bzl generated
        # by genBazelBuild.nix inside the fetched package.
        nixpkgs_args["build_file_content"] = """
package(default_visibility = ["//visibility:public"])

load("@io_tweag_rules_haskell//haskell:import.bzl", haskell_import_new = "haskell_import")
load(":BUILD.bzl", "targets")

targets()
"""
    nixpkgs_package(
        **nixpkgs_args
    )
def _bundle_impl(repository_ctx):
    """Write a BUILD file aliasing each package to its own repository.

    Every entry in `packages` becomes `:pkg -> @<base_repository>-<pkg>//:pkg`.
    """
    base_repo = repository_ctx.attr.base_repository
    sections = ["""
package(default_visibility = ["//visibility:public"])
"""]
    for pkg in repository_ctx.attr.packages:
        sections.append("""
alias(
    name = "{package}",
    actual = "@{base_repo}-{package}//:pkg",
)
""".format(
            package = pkg,
            base_repo = base_repo,
        ))
    repository_ctx.file("BUILD", "".join(sections))
_bundle = repository_rule(
    attrs = {
        # Names of the packages to alias.
        "packages": attr.string_list(),
        # Prefix of the per-package repositories, i.e. aliases point at
        # "@<base_repository>-<package>//:pkg".
        "base_repository": attr.string(),
    },
    implementation = _bundle_impl,
)

"""
Generate an alias from `@base_repo//:package` to `@base_repo-package//:pkg` for
each one of the input package
"""
def haskell_nixpkgs_packages(name, base_attribute_path, packages, **kwargs):
    """Import a set of haskell packages from nixpkgs.

    This takes as input the same arguments as
    [nixpkgs_package](https://github.com/tweag/rules_nixpkgs#nixpkgs_package),
    expecting the `attribute_path` to resolve to a set of haskell packages
    (such as `haskellPackages` or `haskell.packages.ghc822`) preprocessed by
    the `genBazelBuild` function. It also takes as input a list of packages to
    import (which can be generated by the `gen_packages_list` function).
    """

    # One external repository per package, named "<name>-<package>".
    for pkg in packages:
        haskell_nixpkgs_package(
            name = "{}-{}".format(name, pkg),
            attribute_path = "{}.{}".format(base_attribute_path, pkg),
            **kwargs
        )

    # Plus one umbrella repository aliasing them all under "@<name>".
    _bundle(
        name = name,
        packages = packages,
        base_repository = name,
    )
def _is_nix_platform(repository_ctx):
    """Nix is considered available when nix-build is on the PATH."""
    nix_build = repository_ctx.which("nix-build")
    return nix_build != None
def _gen_imports_impl(repository_ctx):
    """Write a `packages.bzl` exposing `import_packages()`.

    On platforms with Nix available the generated macro imports the whole
    package list; elsewhere it is a no-op so builds still load cleanly.
    """
    repository_ctx.file("BUILD", "")

    # Render the stringly-typed extra arguments as "key = value, " pairs.
    rendered_pairs = [
        key + " = " + value + ", "
        for key, value in repository_ctx.attr.extra_args.items()
    ]
    extra_args_raw = "".join(rendered_pairs)
    bzl_file_content = """
load("{repo_name}", "packages")
load("@io_tweag_rules_haskell//haskell:nixpkgs.bzl", "haskell_nixpkgs_packages")

def import_packages(name):
    haskell_nixpkgs_packages(
        name = name,
        packages = packages,
        {extra_args_raw}
    )
""".format(
        repo_name = repository_ctx.attr.packages_list_file,
        extra_args_raw = extra_args_raw,
    )

    # A dummy 'packages.bzl' file with a no-op 'import_packages()' on unsupported platforms
    bzl_file_content_unsupported_platform = """
def import_packages(name):
    return
"""
    if _is_nix_platform(repository_ctx):
        repository_ctx.file("packages.bzl", bzl_file_content)
    else:
        repository_ctx.file("packages.bzl", bzl_file_content_unsupported_platform)
_gen_imports_str = repository_rule(
    implementation = _gen_imports_impl,
    attrs = dict(
        packages_list_file = attr.label(doc = "A list containing the list of packages to import"),
        # We pass the extra arguments to `haskell_nixpkgs_packages` as strings
        # since we can't forward arbitrary arguments in a rule and they will be
        # converted to strings anyways.
        extra_args = attr.string_dict(doc = "Extra arguments for `haskell_nixpkgs_packages`"),
    ),
)

"""
Generate a repository containing a file `packages.bzl` which imports the given
packages list.
"""
def _gen_imports(name, packages_list_file, extra_args):
    """Invoke `_gen_imports_str` with arbitrary-valued `extra_args`.

    The repository rule only accepts string attributes, so every value is
    rendered with `repr` before being forwarded.
    """
    stringified = {}
    for label, value in extra_args.items():
        stringified[label] = repr(value)
    _gen_imports_str(
        name = name,
        packages_list_file = packages_list_file,
        extra_args = stringified,
    )
def haskell_nixpkgs_packageset(name, base_attribute_path, repositories = {}, **kwargs):
    """Import all the available haskell packages.

    The arguments are the same as the arguments of ``nixpkgs_package``, except
    for the ``base_attribute_path`` which should point to an `haskellPackages`
    set in the nix expression

    Example:

      In `haskellPackages.nix`:

      ```nix
      with import <nixpkgs> {};

      let wrapPackages = callPackage <bazel_haskell_wrapper> { }; in
      { haskellPackages = wrapPackages haskell.packages.ghc822; }
      ```

      In your `WORKSPACE`

      ```bazel
      # Define a nix repository to fetch the packages from
      load("@io_tweag_rules_nixpkgs//nixpkgs:nixpkgs.bzl",
           "nixpkgs_git_repository")
      nixpkgs_git_repository(
          name = "nixpkgs",
          revision = "9a787af6bc75a19ac9f02077ade58ddc248e674a",
      )

      load("@io_tweag_rules_haskell//haskell:nixpkgs.bzl",
           "haskell_nixpkgs_packageset")

      # Generate a list of all the available haskell packages
      haskell_nixpkgs_packageset(
          name = "hackage-packages",
          repositories = {"@nixpkgs": "nixpkgs"},
          nix_file = "//haskellPackages.nix",
          base_attribute_path = "haskellPackages",
      )
      load("@hackage-packages//:packages.bzl", "import_packages")
      import_packages(name = "hackage")
      ```

      Then in your `BUILD` files, you can access to the whole of hackage as
      `@hackage//:{your-package-name}`
    """

    # Make the genBazelBuild wrapper resolvable from the nix expression.
    repositories = dicts.add(
        {"bazel_haskell_wrapper": "@io_tweag_rules_haskell//haskell:nix/default.nix"},
        repositories,
    )

    # First fetch only the names of the available packages ...
    nixpkgs_package(
        name = name + "-packages-list",
        attribute_path = base_attribute_path + ".packageNames",
        repositories = repositories,
        build_file_content = """
exports_files(["all-haskell-packages.bzl"])
""",
        fail_not_supported = False,
        **kwargs
    )

    # ... then generate the repository whose packages.bzl imports them all.
    _gen_imports(
        name = name,
        packages_list_file = "@" + name + "-packages-list//:all-haskell-packages.bzl",
        extra_args = dict(
            repositories = repositories,
            base_attribute_path = base_attribute_path,
            **kwargs
        ),
    )
def _ghc_nixpkgs_toolchain_impl(repository_ctx):
    """Generate a BUILD file declaring a haskell_toolchain for the Nix GHC.

    The toolchain's exec/target constraints are derived from the repository
    rule's host OS plus the :nixpkgs constraint value.
    """

    # These constraints might look tautological, because they always
    # match the host platform if it is the same as the target
    # platform. But they are important to state because Bazel
    # toolchain resolution prefers other toolchains with more specific
    # constraints otherwise.
    target_constraints = ["@bazel_tools//platforms:x86_64"]
    if repository_ctx.os.name == "linux":
        target_constraints.append("@bazel_tools//platforms:linux")
    elif repository_ctx.os.name == "mac os x":
        target_constraints.append("@bazel_tools//platforms:osx")
    exec_constraints = list(target_constraints)
    exec_constraints.append("@io_tweag_rules_haskell//haskell/platforms:nixpkgs")

    # Fall back to an empty select() when no per-condition flags are given.
    compiler_flags_select = repository_ctx.attr.compiler_flags_select or {"//conditions:default": []}
    locale_archive = repr(repository_ctx.attr.locale_archive or None)
    repository_ctx.file(
        "BUILD",
        executable = False,
        content = """
load("@io_tweag_rules_haskell//haskell:toolchain.bzl", "haskell_toolchain")

haskell_toolchain(
    name = "toolchain",
    tools = ["{tools}"],
    version = "{version}",
    compiler_flags = {compiler_flags} + {compiler_flags_select},
    haddock_flags = {haddock_flags},
    repl_ghci_args = {repl_ghci_args},
    # On Darwin we don't need a locale archive. It's a Linux-specific
    # hack in Nixpkgs.
    locale_archive = {locale_archive},
    exec_compatible_with = {exec_constraints},
    target_compatible_with = {target_constraints},
)
""".format(
            tools = "@io_tweag_rules_haskell_ghc-nixpkgs//:bin",
            version = repository_ctx.attr.version,
            compiler_flags = repository_ctx.attr.compiler_flags,
            compiler_flags_select = "select({})".format(compiler_flags_select),
            haddock_flags = repository_ctx.attr.haddock_flags,
            repl_ghci_args = repository_ctx.attr.repl_ghci_args,
            locale_archive = locale_archive,
            exec_constraints = exec_constraints,
            target_constraints = target_constraints,
        ),
    )
_ghc_nixpkgs_toolchain = repository_rule(
    _ghc_nixpkgs_toolchain_impl,
    local = False,
    attrs = {
        # These attributes just forward to haskell_toolchain.
        # They are documented there.
        "version": attr.string(),
        "compiler_flags": attr.string_list(),
        "compiler_flags_select": attr.string_list_dict(),
        "haddock_flags": attr.string_list(),
        "repl_ghci_args": attr.string_list(),
        "locale_archive": attr.string(),
    },
)
def haskell_register_ghc_nixpkgs(
        version,
        build_file = None,
        compiler_flags = None,
        compiler_flags_select = None,
        haddock_flags = None,
        repl_ghci_args = None,
        locale_archive = None,
        attribute_path = "haskellPackages.ghc",
        nix_file = None,
        nix_file_deps = [],
        repositories = {}):
    """Register a package from Nixpkgs as a toolchain.

    Toolchains can be used to compile Haskell code. To have this
    toolchain selected during [toolchain
    resolution][toolchain-resolution], set a host platform that
    includes the `@io_tweag_rules_haskell//haskell/platforms:nixpkgs`
    constraint value.

    [toolchain-resolution]: https://docs.bazel.build/versions/master/toolchains.html#toolchain-resolution

    Example:

      ```
      haskell_register_ghc_nixpkgs(
          locale_archive = "@glibc_locales//:locale-archive",
          attribute_path = "haskellPackages.ghc",
          version = "1.2.3", # The version of GHC
      )
      ```

      Setting the host platform can be done on the command-line like
      in the following:

      ```
      --host_platform=@io_tweag_rules_haskell//haskell/platforms:linux_x86_64_nixpkgs
      ```
    """

    # Fetch GHC itself from Nix ...
    haskell_nixpkgs_package(
        name = "io_tweag_rules_haskell_ghc-nixpkgs",
        attribute_path = attribute_path,
        build_file = build_file or "@io_tweag_rules_haskell//haskell:ghc.BUILD",
        nix_file = nix_file,
        nix_file_deps = nix_file_deps,
        repositories = repositories,
    )

    # ... then declare and register a toolchain wrapping it.
    _ghc_nixpkgs_toolchain(
        name = "io_tweag_rules_haskell_ghc-nixpkgs-toolchain",
        version = version,
        compiler_flags = compiler_flags,
        compiler_flags_select = compiler_flags_select,
        haddock_flags = haddock_flags,
        repl_ghci_args = repl_ghci_args,
        locale_archive = locale_archive,
    )
    native.register_toolchains("@io_tweag_rules_haskell_ghc-nixpkgs-toolchain//:toolchain")

View file

@ -0,0 +1,39 @@
# This file declares constraint values for each platform supported by
# GHC. These rules follow the GHC naming convention, for example,
# //haskell/platform:linux and //haskell/platform:x86_64. See the
# config.guess in any GHC source distribution for possible platforms.
#
# These can be used in select expressions to choose platform-specifc
# sources and dependencies.
load(":list.bzl", "declare_config_settings")
declare_config_settings()
# Marks a platform as having its C compiler provided by Nixpkgs; used by
# toolchain resolution to pick the Nix-based GHC toolchain.
constraint_value(
    name = "nixpkgs",
    constraint_setting = "@bazel_tools//tools/cpp:cc_compiler",
    visibility = ["//visibility:public"],
)

platform(
    name = "linux_x86_64_nixpkgs",
    constraint_values = [
        # XXX using the platform names defined here results in a graph
        # cycle for some reason.
        "@bazel_tools//platforms:x86_64",
        "@bazel_tools//platforms:linux",
        ":nixpkgs",
    ],
    visibility = ["//visibility:public"],
)

platform(
    name = "darwin_x86_64_nixpkgs",
    constraint_values = [
        "@bazel_tools//platforms:x86_64",
        "@bazel_tools//platforms:osx",
        ":nixpkgs",
    ],
    visibility = ["//visibility:public"],
)

View file

@ -0,0 +1,44 @@
# Mapping from GHC operating-system names (as in config.guess) to Bazel
# constraint values. A `None` value means Bazel defines no matching
# constraint; declare_config_settings() skips those entries.
OS = {
    "aix": None,
    "darwin": "@bazel_tools//platforms:osx",
    "dragonfly": None,
    "freebsd": "@bazel_tools//platforms:freebsd",
    "haiku": None,
    "hpux": None,
    "ios": "@bazel_tools//platforms:ios",
    "linux_android": "@bazel_tools//platforms:android",
    "linux": "@bazel_tools//platforms:linux",
    "mingw32": "@bazel_tools//platforms:windows",
    "netbsd": None,
    "openbsd": None,
    "solaris2": None,
}

# Mapping from GHC architecture names to Bazel constraint values;
# `None` entries are skipped, as above.
ARCH = {
    "aarch64": None,
    "alpha": None,
    "arm64": "@bazel_tools//platforms:aarch64",
    "arm": "@bazel_tools//platforms:arm",
    "i386": "@bazel_tools//platforms:x86_32",
    "ia64": None,
    "powerpc64": None,
    "powerpc64le": None,
    "powerpc": "@bazel_tools//platforms:ppc",
    "rs6000": None,
    "sparc": None,
    "x86_64": "@bazel_tools//platforms:x86_64",
}
def declare_config_settings():
    """Declare one config_setting per OS and architecture that has a
    matching Bazel constraint value (entries mapped to None are skipped)."""
    for mapping in (OS, ARCH):
        for setting_name, constraint_value in mapping.items():
            if not constraint_value:
                continue
            native.config_setting(
                name = setting_name,
                constraint_values = [constraint_value],
            )

View file

@ -0,0 +1,64 @@
load(":providers.bzl", "GhcPluginInfo", "HaskellLibraryInfo")
def ghc_plugin_impl(ctx):
    """Expand plugin option strings and bundle everything into GhcPluginInfo."""

    # Expand $(location ...) references first, then make variables.
    expanded_args = []
    for raw_arg in ctx.attr.args:
        located = ctx.expand_location(raw_arg, ctx.attr.tools)
        expanded_args.append(ctx.expand_make_variables("args", located, {}))

    # XXX Ideally we'd resolve tools downstream.
    (tool_inputs, tool_input_manifests) = ctx.resolve_tools(tools = ctx.attr.tools)
    return [
        GhcPluginInfo(
            module = ctx.attr.module,
            deps = ctx.attr.deps,
            tool_inputs = tool_inputs,
            tool_input_manifests = tool_input_manifests,
            args = expanded_args,
        ),
    ]
ghc_plugin = rule(
    ghc_plugin_impl,
    attrs = {
        "module": attr.string(
            doc = "Plugin entrypoint.",
        ),
        "deps": attr.label_list(
            doc = "Plugin dependencies. These are compile-time dependencies only.",
            providers = [HaskellLibraryInfo],
        ),
        "args": attr.string_list(
            doc = "Plugin options.",
        ),
        "tools": attr.label_list(
            doc = "Tools needed by the plugin when it used.",
        ),
    },
)

"""Declare a GHC plugin.

Example:
  ```bzl
  haskell_library(
      name = "plugin-lib",
      srcs = ["Plugin.hs"],
  )

  ghc_plugin(
      name = "plugin",
      module = "Plugin",
      deps = [":plugin-lib"],
  )

  haskell_binary(
      name = "some-binary",
      srcs = ["Main.hs"],
      plugins = [":plugin"],
  )
  ```

Plugins to use during compilation by GHC are given by the `plugins`
attribute to Haskell rules. Plugins are haskell libraries with some
extra metadata, like the name of the module that acts as the
entrypoint for the plugin and plugin options.
"""

View file

@ -0,0 +1,563 @@
"""Actions for compiling Haskell source code"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"declare_compiled",
"module_name",
"target_unique_name",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/version_macros.bzl", "version_macro_includes")
load(
":providers.bzl",
"GhcPluginInfo",
"get_libs_for_ghc_linker",
"merge_HaskellCcInfo",
)
load(":private/set.bzl", "set")
def _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, hsc_file):
    """Process a single hsc file.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      hsc_flags: extra flags to pass to hsc2hs
      hsc_inputs: extra file inputs for the hsc2hs command
      hsc_file: hsc file to process.

    Returns:
      (File, string): Haskell source file created by processing hsc_file and
         new import directory containing the produced file.
    """
    args = hs.actions.args()

    # Output a Haskell source file.
    hsc_dir_raw = paths.join("_hsc", hs.name)
    hs_out = declare_compiled(hs, hsc_file, ".hs", directory = hsc_dir_raw)
    args.add_all([hsc_file.path, "-o", hs_out.path])

    # hsc2hs -c selects the C compiler, -l the linker; both point at the
    # CC toolchain's compiler here.
    args.add_all(["-c", cc.tools.cc])
    args.add_all(["-l", cc.tools.cc])
    args.add("-ighcplatform.h")
    args.add("-ighcversion.h")
    args.add_all(["--cflag=" + f for f in cc.cpp_flags])
    args.add_all(["--cflag=" + f for f in cc.compiler_flags])
    args.add_all(["--cflag=" + f for f in cc.include_args])
    args.add_all(["--lflag=" + f for f in cc.linker_flags])
    args.add_all(hsc_flags)

    # Add an empty PATH variable if not already specified in hs.env.
    # Needed to avoid a "Couldn't read PATH" error on Windows.
    #
    # On Unix platforms, though, we musn't set PATH as it is automatically set up
    # by the run action, unless already set in the env parameter. This triggers
    # build errors when using GHC bindists on Linux.
    if hs.env.get("PATH") == None and hs.toolchain.is_windows:
        hs.env["PATH"] = ""
    hs.actions.run(
        inputs = depset(transitive = [
            depset(cc.hdrs),
            depset([hsc_file]),
            depset(cc.files),
            depset(hsc_inputs),
        ]),
        outputs = [hs_out],
        mnemonic = "HaskellHsc2hs",
        executable = hs.tools.hsc2hs,
        arguments = [args],
        env = hs.env,
    )

    # The generated .hs file lives under bin_dir; report its directory as a
    # new import hierarchy root.
    idir = paths.join(
        hs.bin_dir.path,
        hs.label.package,
        hsc_dir_raw,
    )
    return hs_out, idir
def _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id, version, plugins):
    """Compute variables common to all compilation targets (binary and library).

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      java: Java interop info (provides extra inputs and env).
      dep_info: HaskellInfo aggregated over the target's deps.
      plugin_dep_info: HaskellInfo aggregated over GHC plugin deps.
      srcs: Haskell sources (.hs/.lhs, .hsc, boot files, .h headers).
      import_dir_map: maps a source file to its import hierarchy root.
      extra_srcs: depset of additional non-Haskell source files.
      user_compile_flags: compiler flags supplied by the rule.
      with_profiling: whether to produce profiling objects/interfaces.
      my_pkg_id: package id when compiling a package, else None.
      version: package version string, drives version macro generation.
      plugins: targets providing GhcPluginInfo.

    Returns:
      struct with the following fields:
        args: default argument list
        compile_flags: arguments that were used to compile the package
        inputs: default inputs
        input_manifests: input manifests
        outputs: default outputs
        objects_dir: object files directory
        interfaces_dir: interface files directory
        source_files: set of files that contain Haskell modules
        extra_source_files: depset of non-Haskell source files
        import_dirs: c2hs Import hierarchy roots
        env: default environment variables
    """
    compile_flags = []

    # GHC expects the CC compiler as the assembler, but segregates the
    # set of flags to pass to it when used as an assembler. So we have
    # to set both -optc and -opta.
    cc_args = [
        "-optc" + f
        for f in cc.compiler_flags
    ] + [
        "-opta" + f
        for f in cc.compiler_flags
    ]
    compile_flags += cc_args

    # Profiling outputs are kept apart from normal ones.
    interface_dir_raw = "_iface_prof" if with_profiling else "_iface"
    object_dir_raw = "_obj_prof" if with_profiling else "_obj"

    # Declare file directories.
    #
    # NOTE: We could have used -outputdir here and a single output
    # directory. But keeping interface and object files separate has
    # one advantage: if interface files are invariant under
    # a particular code change, then we don't need to rebuild
    # downstream.
    if my_pkg_id:
        # If we're compiling a package, put the interfaces inside the
        # package directory.
        interfaces_dir = hs.actions.declare_directory(
            paths.join(
                pkg_id.to_string(my_pkg_id),
                interface_dir_raw,
            ),
        )
    else:
        interfaces_dir = hs.actions.declare_directory(
            paths.join(interface_dir_raw, hs.name),
        )
    objects_dir = hs.actions.declare_directory(
        paths.join(object_dir_raw, hs.name),
    )

    # Default compiler flags.
    compile_flags += hs.toolchain.compiler_flags
    compile_flags += user_compile_flags

    # Work around macOS linker limits. This fix has landed in GHC HEAD, but is
    # not yet in a release; plus, we still want to support older versions of
    # GHC. For details, see: https://phabricator.haskell.org/D4714
    if hs.toolchain.is_darwin:
        compile_flags += ["-optl-Wl,-dead_strip_dylibs"]

    # Expose direct dependencies' packages to the compiler.
    compile_flags.extend(
        pkg_info_to_compile_flags(
            expose_packages(
                dep_info,
                lib_info = None,
                use_direct = True,
                use_my_pkg_id = my_pkg_id,
                custom_package_databases = None,
                version = version,
            ),
        ),
    )

    # Same for plugin dependencies, but flagged for plugin use.
    compile_flags.extend(
        pkg_info_to_compile_flags(
            expose_packages(
                plugin_dep_info,
                lib_info = None,
                use_direct = True,
                use_my_pkg_id = my_pkg_id,
                custom_package_databases = None,
                version = version,
            ),
            for_plugin = True,
        ),
    )
    header_files = []
    boot_files = []
    source_files = set.empty()

    # Forward all "-D" and "-optP-D" flags to hsc2hs
    hsc_flags = []
    hsc_flags += ["--cflag=" + x for x in user_compile_flags if x.startswith("-D")]
    hsc_flags += ["--cflag=" + x[len("-optP"):] for x in user_compile_flags if x.startswith("-optP-D")]
    hsc_inputs = []
    if version:
        (version_macro_headers, version_macro_flags) = version_macro_includes(dep_info)
        hsc_flags += ["--cflag=" + x for x in version_macro_flags]
        hsc_inputs += set.to_list(version_macro_headers)

    # Add import hierarchy root.
    # Note that this is not perfect, since GHC requires hs-boot files
    # to be in the same directory as the corresponding .hs file. Thus
    # the two must both have the same root; i.e., both plain files,
    # both in bin_dir, or both in genfiles_dir.
    import_dirs = set.from_list([
        hs.src_root,
        paths.join(hs.bin_dir.path, hs.src_root),
        paths.join(hs.genfiles_dir.path, hs.src_root),
    ])

    # Partition sources by extension; .hsc files are preprocessed first.
    for s in srcs:
        if s.extension == "h":
            header_files.append(s)
        elif s.extension == "hsc":
            s0, idir = _process_hsc_file(hs, cc, hsc_flags, hsc_inputs, s)
            set.mutable_insert(source_files, s0)
            set.mutable_insert(import_dirs, idir)
        elif s.extension in ["hs-boot", "lhs-boot"]:
            boot_files.append(s)
        else:
            set.mutable_insert(source_files, s)
            if s in import_dir_map:
                idir = import_dir_map[s]
                set.mutable_insert(import_dirs, idir)
    compile_flags += ["-i{0}".format(d) for d in set.to_list(import_dirs)]

    # Write the -optP flags to a parameter file because they can be very long on Windows
    # e.g. 27Kb for grpc-haskell
    # Equivalent to: compile_flags += ["-optP" + f for f in cc.cpp_flags]
    optp_args_file = hs.actions.declare_file("optp_args_%s" % hs.name)
    optp_args = hs.actions.args()
    optp_args.add_all(cc.cpp_flags)
    optp_args.set_param_file_format("multiline")
    hs.actions.write(optp_args_file, optp_args)
    compile_flags += ["-optP@" + optp_args_file.path]
    compile_flags += cc.include_args
    locale_archive_depset = (
        depset([hs.toolchain.locale_archive]) if hs.toolchain.locale_archive != None else depset()
    )

    # This is absolutely required otherwise GHC doesn't know what package it's
    # creating `Name`s for to put them in Haddock interface files which then
    # results in Haddock not being able to find names for linking in
    # environment after reading its interface file later.
    if my_pkg_id != None:
        unit_id_args = [
            "-this-unit-id",
            pkg_id.to_string(my_pkg_id),
            "-optP-DCURRENT_PACKAGE_KEY=\"{}\"".format(pkg_id.to_string(my_pkg_id)),
        ]
        compile_flags += unit_id_args
    args = hs.actions.args()

    # Compilation mode. Allow rule-supplied compiler flags to override it.
    if hs.mode == "opt":
        args.add("-O2")
    args.add("-static")
    if with_profiling:
        args.add("-prof", "-fexternal-interpreter")

    # Common flags
    args.add_all([
        "-v0",
        "-no-link",
        "-fPIC",
        "-hide-all-packages",
        # Should never trigger in sandboxed builds, but can be useful
        # to debug issues in non-sandboxed builds.
        "-Wmissing-home-modules",
    ])

    # Output directories
    args.add_all([
        "-odir",
        objects_dir.path,
        "-hidir",
        interfaces_dir.path,
    ])

    # Interface files with profiling have to have the extension "p_hi":
    # https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/packages.html#installedpackageinfo-a-package-specification
    # otherwise we won't be able to register them with ghc-pkg.
    if with_profiling:
        args.add_all([
            "-hisuf",
            "p_hi",
            "-osuf",
            "p_o",
        ])
    args.add_all(compile_flags)

    # Plugins
    for plugin in plugins:
        args.add("-fplugin={}".format(plugin[GhcPluginInfo].module))
        for opt in plugin[GhcPluginInfo].args:
            args.add_all(["-fplugin-opt", "{}:{}".format(plugin[GhcPluginInfo].module, opt)])
    plugin_tool_inputs = [plugin[GhcPluginInfo].tool_inputs for plugin in plugins]
    plugin_tool_input_manifests = [
        manifest
        for plugin in plugins
        for manifest in plugin[GhcPluginInfo].tool_input_manifests
    ]

    # Pass source files
    for f in set.to_list(source_files):
        args.add(f)
    extra_source_files = depset(
        transitive = [extra_srcs, depset(header_files), depset(boot_files)],
    )

    # Transitive library dependencies for runtime.
    (library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
        hs,
        merge_HaskellCcInfo(
            dep_info.transitive_cc_dependencies,
            plugin_dep_info.transitive_cc_dependencies,
        ),
    )
    return struct(
        args = args,
        compile_flags = compile_flags,
        inputs = depset(transitive = [
            depset(header_files),
            depset(boot_files),
            set.to_depset(source_files),
            extra_source_files,
            depset(cc.hdrs),
            set.to_depset(dep_info.package_databases),
            set.to_depset(dep_info.interface_dirs),
            depset(dep_info.static_libraries),
            depset(dep_info.static_libraries_prof),
            set.to_depset(dep_info.dynamic_libraries),
            set.to_depset(plugin_dep_info.package_databases),
            set.to_depset(plugin_dep_info.interface_dirs),
            depset(plugin_dep_info.static_libraries),
            depset(plugin_dep_info.static_libraries_prof),
            set.to_depset(plugin_dep_info.dynamic_libraries),
            depset(library_deps),
            depset(ld_library_deps),
            java.inputs,
            locale_archive_depset,
            depset(transitive = plugin_tool_inputs),
            depset([optp_args_file]),
        ]),
        input_manifests = plugin_tool_input_manifests,
        objects_dir = objects_dir,
        interfaces_dir = interfaces_dir,
        outputs = [objects_dir, interfaces_dir],
        source_files = source_files,
        extra_source_files = depset(transitive = [extra_source_files, depset([optp_args_file])]),
        import_dirs = import_dirs,
        env = dicts.add(
            ghc_env,
            java.env,
            hs.env,
        ),
    )
def _hpc_compiler_args(hs):
    """Compiler flags enabling HPC code-coverage instrumentation.

    Args:
      hs: Haskell context; `hs.bin_dir.path` and `hs.package_root` determine
          where GHC writes the generated `.mix` files.

    Returns:
      list of string: GHC arguments turning on `-fhpc` with the coverage
      output directory pointed at `<bin_dir>/<package_root>/.hpc`.
    """
    coverage_dir = "/".join([hs.bin_dir.path, hs.package_root, ".hpc"])
    return ["-fhpc", "-hpcdir", coverage_dir]
def _coverage_datum(mix_file, src_file, target_label):
    # Bundle the metadata relating one HPC .mix file to the Haskell source
    # it instruments and the Bazel target that produced it. Consumed when
    # collecting `coverage_data` in compile_binary/compile_library.
    return struct(
        mix_file = mix_file,  # File: the HPC .mix module description
        src_file = src_file,  # File: the Haskell source it was generated from
        target_label = target_label,  # Label of the owning target
    )
def compile_binary(
        hs,
        cc,
        java,
        dep_info,
        plugin_dep_info,
        srcs,
        ls_modules,
        import_dir_map,
        extra_srcs,
        user_compile_flags,
        dynamic,
        with_profiling,
        main_function,
        version,
        inspect_coverage = False,
        plugins = []):
    """Compile a Haskell target into object files suitable for linking.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      java: Java dependency information.
      dep_info: HaskellInfo provider of the target's dependencies.
      plugin_dep_info: HaskellInfo provider of the GHC plugin dependencies.
      srcs: Haskell source files to compile.
      ls_modules: Executable that lists exposed modules from the interfaces.
      import_dir_map: Mapping from source files to import directories.
      extra_srcs: Additional (non-Haskell) source inputs.
      user_compile_flags: Compiler flags from the rule attributes.
      dynamic: Bool: whether the objects will be linked dynamically.
      with_profiling: Bool: whether to compile with profiling enabled.
      main_function: Qualified name of the binary's main function.
      version: Package version, if any.
      inspect_coverage: Bool: whether to instrument for HPC code coverage.
      plugins: GHC compiler plugin targets.

    Returns:
      struct with the following fields:
        objects_dir: directory containing the produced object files
        source_files: set of Haskell source files
        extra_source_files: depset of additional source inputs
        import_dirs: import directories that should make all modules visible
        compile_flags: list of string arguments suitable for Haddock
        exposed_modules_file: file listing exposed modules, or None when
          compiling with profiling
        coverage_data: list of coverage data (one per module) when
          inspect_coverage is set, otherwise empty
    """
    c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = None, version = version, plugins = plugins)
    c.args.add_all(["-main-is", main_function])
    if dynamic:
        # For binaries, GHC creates .o files even for code to be
        # linked dynamically. So we have to force the object suffix to
        # be consistent with the dynamic object suffix in the library
        # case.
        # NOTE(review): "-osuf dyn_o" is passed as a single argument rather
        # than as separate "-osuf", "dyn_o" tokens — confirm GHC accepts
        # this combined form on this code path.
        c.args.add_all(["-dynamic", "-osuf dyn_o"])

    # When inspecting coverage, instrument with HPC and declare one .mix
    # file per compiled module.
    coverage_data = []
    if inspect_coverage:
        c.args.add_all(_hpc_compiler_args(hs))
        for src_file in srcs:
            module = module_name(hs, src_file)
            mix_file = hs.actions.declare_file(".hpc/{module}.mix".format(module = module))
            coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))

    hs.toolchain.actions.run_ghc(
        hs,
        cc,
        inputs = c.inputs,
        input_manifests = c.input_manifests,
        outputs = c.outputs + [datum.mix_file for datum in coverage_data],
        mnemonic = "HaskellBuildBinary" + ("Prof" if with_profiling else ""),
        progress_message = "HaskellBuildBinary {}".format(hs.label),
        env = c.env,
        arguments = c.args,
    )

    if with_profiling:
        exposed_modules_file = None
    else:
        exposed_modules_file = hs.actions.declare_file(
            target_unique_name(hs, "exposed-modules"),
        )

        # Derive the exposed-module list from the interface files;
        # /dev/null stands in for the (empty) hidden and reexported lists.
        hs.actions.run(
            inputs = [c.interfaces_dir, hs.toolchain.global_pkg_db],
            outputs = [exposed_modules_file],
            executable = ls_modules,
            arguments = [
                c.interfaces_dir.path,
                hs.toolchain.global_pkg_db.path,
                "/dev/null",  # no hidden modules
                "/dev/null",  # no reexported modules
                exposed_modules_file.path,
            ],
            use_default_shell_env = True,
        )

    return struct(
        objects_dir = c.objects_dir,
        source_files = c.source_files,
        extra_source_files = c.extra_source_files,
        import_dirs = c.import_dirs,
        compile_flags = c.compile_flags,
        exposed_modules_file = exposed_modules_file,
        coverage_data = coverage_data,
    )
def compile_library(
        hs,
        cc,
        java,
        dep_info,
        plugin_dep_info,
        srcs,
        ls_modules,
        other_modules,
        exposed_modules_reexports,
        import_dir_map,
        extra_srcs,
        user_compile_flags,
        with_shared,
        with_profiling,
        my_pkg_id,
        plugins = []):
    """Build arguments for Haskell package build.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      java: Java dependency information.
      dep_info: HaskellInfo provider of the target's dependencies.
      plugin_dep_info: HaskellInfo provider of the GHC plugin dependencies.
      srcs: Haskell source files to compile.
      ls_modules: Executable that lists exposed modules from the interfaces.
      other_modules: List of hidden module names.
      exposed_modules_reexports: List of module reexport declarations.
      import_dir_map: Mapping from source files to import directories.
      extra_srcs: Additional (non-Haskell) source inputs.
      user_compile_flags: Compiler flags from the rule attributes.
      with_shared: Bool: whether to additionally build dynamic objects.
      with_profiling: Bool: whether to compile with profiling enabled.
      my_pkg_id: The package id of the library being built.
      plugins: GHC compiler plugin targets.

    Returns:
      struct with the following fields:
        interfaces_dir: directory containing interface files
        objects_dir: directory containing object files
        compile_flags: list of string arguments suitable for Haddock
        source_files: set of Haskell module files
        extra_source_files: depset of additional source inputs
        import_dirs: import directories that should make all modules visible (for GHCi)
        exposed_modules_file: file listing exposed modules, or None when
          compiling with profiling
        coverage_data: list of coverage data (one per module) when coverage
          is enabled, otherwise empty
    """
    c = _compilation_defaults(hs, cc, java, dep_info, plugin_dep_info, srcs, import_dir_map, extra_srcs, user_compile_flags, with_profiling, my_pkg_id = my_pkg_id, version = my_pkg_id.version, plugins = plugins)
    if with_shared:
        c.args.add("-dynamic-too")

    # When coverage is enabled, instrument with HPC and declare one .mix
    # file per compiled module, namespaced by the package id.
    coverage_data = []
    if hs.coverage_enabled:
        c.args.add_all(_hpc_compiler_args(hs))
        for src_file in srcs:
            pkg_id_string = pkg_id.to_string(my_pkg_id)
            module = module_name(hs, src_file)
            mix_file = hs.actions.declare_file(".hpc/{pkg}/{module}.mix".format(pkg = pkg_id_string, module = module))
            coverage_data.append(_coverage_datum(mix_file, src_file, hs.label))

    hs.toolchain.actions.run_ghc(
        hs,
        cc,
        inputs = c.inputs,
        input_manifests = c.input_manifests,
        outputs = c.outputs + [datum.mix_file for datum in coverage_data],
        mnemonic = "HaskellBuildLibrary" + ("Prof" if with_profiling else ""),
        progress_message = "HaskellBuildLibrary {}".format(hs.label),
        env = c.env,
        arguments = c.args,
    )

    if with_profiling:
        exposed_modules_file = None
    else:
        # Write the hidden and reexported module lists to files, then have
        # ls_modules compute the exposed-module list from the interfaces.
        hidden_modules_file = hs.actions.declare_file(
            target_unique_name(hs, "hidden-modules"),
        )
        hs.actions.write(
            output = hidden_modules_file,
            content = ", ".join(other_modules),
        )
        reexported_modules_file = hs.actions.declare_file(
            target_unique_name(hs, "reexported-modules"),
        )
        hs.actions.write(
            output = reexported_modules_file,
            content = ", ".join(exposed_modules_reexports),
        )
        exposed_modules_file = hs.actions.declare_file(
            target_unique_name(hs, "exposed-modules"),
        )
        hs.actions.run(
            inputs = [
                c.interfaces_dir,
                hs.toolchain.global_pkg_db,
                hidden_modules_file,
                reexported_modules_file,
            ],
            outputs = [exposed_modules_file],
            executable = ls_modules,
            arguments = [
                c.interfaces_dir.path,
                hs.toolchain.global_pkg_db.path,
                hidden_modules_file.path,
                reexported_modules_file.path,
                exposed_modules_file.path,
            ],
            use_default_shell_env = True,
        )

    return struct(
        interfaces_dir = c.interfaces_dir,
        objects_dir = c.objects_dir,
        compile_flags = c.compile_flags,
        source_files = c.source_files,
        extra_source_files = c.extra_source_files,
        import_dirs = c.import_dirs,
        exposed_modules_file = exposed_modules_file,
        coverage_data = coverage_data,
    )

View file

@ -0,0 +1,667 @@
"""Actions for linking object code produced by compilation"""
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"is_static_library",
"ln",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/list.bzl", "list")
# tests in /tests/unit_tests/BUILD
def parent_dir_path(path):
    """Split the parent directory of `path` into its components.

    The path is not normalized; `..` segments are preserved.

        foo         => ["."]
        foo/        => ["foo"]
        foo/bar     => ["foo"]
        foo/bar/baz => ["foo", "bar"]
        foo/../bar  => ["foo", ".."]

    Args:
      path: a path string.

    Returns:
      The parent directory as a list of segments, e.g. `["foo", "bar"]`.
      A bare filename yields `["."]`.
    """
    parent = paths.dirname(path)

    # paths.dirname yields "" when there is no directory component;
    # map that to the identity path, ".".
    if not parent:
        return ["."]
    return parent.split("/")
def __check_dots(target, path):
    # Abort the analysis if the split path still contains a (non-leading)
    # ".." segment: later rpath computations cannot handle such targets.
    for segment in path:
        if segment == "..":
            fail("the short_path of target {} (which is {}) contains more dots than loading `../`. We cant handle that.".format(
                target,
                target.short_path,
            ))
# Skylark doesn't allow nested defs, which is why __check_dots is a top-level helper.
def _get_target_parent_dir(target):
    """Parent directory of `target`, accounting for external repositories.

    A `short_path` beginning with `../` signifies that the target lives in
    an external repository; the leading `..` segment is stripped in that
    case.

    Args:
      target: a target; its `.short_path` is inspected.

    Returns:
      (is_external, parent_dir):
        is_external: Bool, whether the target is in an external repository.
        parent_dir: the parent directory segments, up to the runfiles top
          level (or the external repository top level). `[]` if there is no
          parent dir.
    """
    segments = parent_dir_path(target.short_path)
    head = segments[0]

    if head == ".":
        # Bare filename: no parent directory.
        return (False, [])

    if head == "..":
        # External repository: drop the leading "..".
        rest = segments[1:]
        __check_dots(target, rest)
        return (True, rest)

    # Ordinary in-repository target.
    __check_dots(target, segments)
    return (False, segments)
# tests in /tests/unit_tests/BUILD
def create_rpath_entry(
        binary,
        dependency,
        keep_filename,
        prefix = ""):
    """Relative path from `binary` to `dependency` within the bazel runpath.

    The result never leaves the current bazel runpath and accounts for the
    peculiar `.short_path` form of targets in external repositories. It is
    suitable for insertion into an rpath (or similar).

    Examples:

      bin.short_path=foo/a.so and dep.short_path=bar/b.so
      => create_rpath_entry(bin, dep, False) = ../bar
      and
      create_rpath_entry(bin, dep, True) = ../bar/b.so
      and
      create_rpath_entry(bin, dep, True, "$ORIGIN") = $ORIGIN/../bar/b.so

    Args:
      binary: target of current binary
      dependency: target of dependency to relatively point to
      keep_filename: whether to point to the filename or its parent dir
      prefix: string path prefix to add before the relative path

    Returns:
      relative path string
    """
    (bin_is_external, bin_parent_dir) = _get_target_parent_dir(binary)
    (dep_is_external, dep_parent_dir) = _get_target_parent_dir(dependency)

    # Climb out of the binary's parent directories, up to the runfiles root.
    segments = [".."] * len(bin_parent_dir)

    # External repositories live in `target.runfiles/external`, while the
    # internal repository lives in `target.runfiles` directly. The
    # `.short_path`s of external targets start with `../`, which cannot
    # simply be appended; instead the traversal between the two roots is
    # made explicit:
    if bin_is_external and not dep_is_external:
        # climb out of `external` first
        segments = [".."] + segments
    elif dep_is_external and not bin_is_external:
        # descend into `external`
        segments = segments + ["external"]
    # (both external, or both internal: already at the right root)

    # Descend into the dependency's parent directory.
    segments = segments + dep_parent_dir

    # Optionally address the file itself rather than its directory.
    if keep_filename:
        segments = segments + [paths.basename(dependency.short_path)]

    # Normalize for good measure and assemble the final entry.
    entry = paths.normalize("/".join(segments))
    return entry if prefix == "" else prefix + "/" + entry
def _merge_parameter_files(hs, file1, file2):
    """Concatenate two GHC parameter files into a single new one.

    Args:
      hs: Haskell context.
      file1: The first parameter file.
      file2: The second parameter file.

    Returns:
      File: parameter file holding the parameters of both inputs. Its name
      is derived from the input file names and it is placed next to `file1`.
    """
    merged = hs.actions.declare_file(
        "{0}.and.{1}".format(file1.basename, file2.basename),
        sibling = file1,
    )
    hs.actions.run_shell(
        inputs = [file1, file2],
        outputs = [merged],
        command = """
    cat {file1} {file2} > {out}
    """.format(
            file1 = file1.path,
            file2 = file2.path,
            out = merged.path,
        ),
    )
    return merged
def _darwin_create_extra_linker_flags_file(hs, cc, objects_dir, executable, dynamic, solibs):
    """Write additional linker flags required on MacOS to a parameter file.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      objects_dir: Directory storing object files.
        Used to determine output file location.
      executable: The executable being built.
      dynamic: Bool: Whether to link dynamically or statically.
      solibs: List of dynamic library dependencies.

    Returns:
      File: Parameter file with additional linker flags. To be passed to GHC.
    """

    # On Darwin GHC will pass the dead_strip_dylibs flag to the linker. This
    # flag will remove any shared library loads from the binary's header that
    # are not directly resolving undefined symbols in the binary. I.e. any
    # indirect shared library dependencies will be removed. This conflicts with
    # Bazel's builtin cc rules, which assume that the final binary will load
    # all transitive shared library dependencies. In particlar shared libraries
    # produced by Bazel's cc rules never load shared libraries themselves. This
    # causes missing symbols at runtime on MacOS, see #170.
    #
    # The following work-around applies the `-u` flag to the linker for any
    # symbol that is undefined in any transitive shared library dependency.
    # This forces the linker to resolve these undefined symbols in all
    # transitive shared library dependencies and keep the corresponding load
    # commands in the binary's header.
    #
    # Unfortunately, this prohibits elimination of any truly redundant shared
    # library dependencies. Furthermore, the transitive closure of shared
    # library dependencies can be large, so this makes it more likely to exceed
    # the MACH-O header size limit on MacOS.
    #
    # This is a horrendous hack, but it seems to be forced on us by how Bazel
    # builds dynamic cc libraries.
    suffix = ".dynamic.linker_flags" if dynamic else ".static.linker_flags"
    linker_flags_file = hs.actions.declare_file(
        executable.basename + suffix,
        sibling = objects_dir,
    )

    # `nm -u` lists each library's undefined symbols; every symbol becomes a
    # `-optl-Wl,-u,<symbol>` flag appended to the output parameter file.
    hs.actions.run_shell(
        inputs = solibs,
        outputs = [linker_flags_file],
        command = """
        touch {out}
        for lib in {solibs}; do
            {nm} -u "$lib" | sed 's/^/-optl-Wl,-u,/' >> {out}
        done
        """.format(
            nm = cc.tools.nm,
            solibs = " ".join(["\"" + l.path + "\"" for l in solibs]),
            out = linker_flags_file.path,
        ),
    )
    return linker_flags_file
def _create_objects_dir_manifest(hs, objects_dir, dynamic, with_profiling):
    """List the object files in `objects_dir` into a manifest file.

    The manifest (one object path per line) is later consumed by the link
    actions, either as a GHC parameter file or through `ar`'s `@file`
    syntax.

    Args:
      hs: Haskell context.
      objects_dir: Directory containing the compiled object files.
      dynamic: Bool: whether to list dynamic (`dyn_o`) objects.
      with_profiling: Bool: whether to list profiling (`p_o`) objects.

    Returns:
      File: the manifest file.
    """
    if dynamic:
        suffix = ".dynamic.manifest"
    else:
        suffix = ".static.manifest"
    manifest = hs.actions.declare_file(
        objects_dir.basename + suffix,
        sibling = objects_dir,
    )

    # Profiling takes precedence over dynamic, matching the object suffixes
    # GHC was instructed to emit during compilation.
    ext = "p_o" if with_profiling else ("dyn_o" if dynamic else "o")

    hs.actions.run_shell(
        inputs = [objects_dir],
        outputs = [manifest],
        command = """
        find {dir} -name '*.{ext}' > {out}
        """.format(
            dir = objects_dir.path,
            ext = ext,
            out = manifest.path,
        ),
        use_default_shell_env = True,
    )
    return manifest
def _link_dependencies(hs, dep_info, dynamic, binary, args):
    """Configure linker flags and inputs.

    Configure linker flags for C library dependencies and runtime dynamic
    library dependencies. And collect the C libraries to pass as inputs to
    the linking action.

    Args:
      hs: Haskell context.
      dep_info: HaskellInfo provider.
      dynamic: Bool: Whether to link dynamically, or statically.
      binary: Final linked binary.
      args: Arguments to the linking action.

    Returns:
      (cc_link_libs, cc_solibs, hs_solibs):
        cc_link_libs: depset, C library dependencies to provide as input to
          the linking action.
        cc_solibs: list, transitive dynamic C library dependencies needed in
          RUNPATH at runtime.
        hs_solibs: list, symlinks to Haskell dynamic library dependencies
          collected under a common RUNPATH directory (only populated when
          linking dynamically).
    """

    # Pick linking context based on linking mode.
    if dynamic:
        link_ctx = dep_info.cc_dependencies.dynamic_linking
        trans_link_ctx = dep_info.transitive_cc_dependencies.dynamic_linking
    else:
        link_ctx = dep_info.cc_dependencies.static_linking
        trans_link_ctx = dep_info.transitive_cc_dependencies.static_linking

    # Direct C library dependencies to link.
    # I.e. not indirect through another Haskell dependency.
    # Such indirect dependencies are linked by GHC based on the extra-libraries
    # fields in the dependency's package configuration file.
    libs_to_link = link_ctx.libraries_to_link.to_list()
    _add_external_libraries(args, libs_to_link)

    # Transitive library dependencies to have in scope for linking.
    trans_libs_to_link = trans_link_ctx.libraries_to_link.to_list()

    # Libraries to pass as inputs to linking action.
    cc_link_libs = depset(transitive = [
        depset(trans_libs_to_link),
    ])

    # Transitive dynamic library dependencies to have in RUNPATH.
    cc_solibs = trans_link_ctx.dynamic_libraries_for_runtime.to_list()

    # Collect Haskell dynamic library dependencies in common RUNPATH.
    # This is to keep the number of RUNPATH entries low, for faster loading
    # and to avoid exceeding the MACH-O header size limit on MacOS.
    hs_solibs = []
    if dynamic:
        hs_solibs_prefix = "_hssolib_%s" % hs.name
        for dep in set.to_list(dep_info.dynamic_libraries):
            dep_link = hs.actions.declare_file(
                paths.join(hs_solibs_prefix, dep.basename),
                sibling = binary,
            )
            ln(hs, dep, dep_link)
            hs_solibs.append(dep_link)

    # Configure RUNPATH.
    rpaths = _infer_rpaths(
        hs.toolchain.is_darwin,
        binary,
        trans_link_ctx.dynamic_libraries_for_runtime.to_list() +
        hs_solibs,
    )
    for rpath in set.to_list(rpaths):
        args.add("-optl-Wl,-rpath," + rpath)

    return (cc_link_libs, cc_solibs, hs_solibs)
def link_binary(
        hs,
        cc,
        dep_info,
        extra_srcs,
        compiler_flags,
        objects_dir,
        dynamic,
        with_profiling,
        version):
    """Link Haskell binary from static object files.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      dep_info: HaskellInfo provider of the dependencies.
      extra_srcs: Extra (non-Haskell) source inputs for the link action.
      compiler_flags: Compiler flags from the rule attributes.
      objects_dir: Directory containing the object files to link.
      dynamic: Bool: Whether to link the Haskell code dynamically.
      with_profiling: Bool: Whether profiling is enabled.
      version: Package version, if any.

    Returns:
      (File, list): the produced executable, and the shared libraries
      (C and Haskell) it requires at runtime.
    """
    exe_name = hs.name + (".exe" if hs.toolchain.is_windows else "")
    executable = hs.actions.declare_file(exe_name)

    args = hs.actions.args()
    args.add_all(["-optl" + f for f in cc.linker_flags])
    if with_profiling:
        args.add("-prof")
    args.add_all(hs.toolchain.compiler_flags)
    args.add_all(compiler_flags)

    # By default, GHC will produce mostly-static binaries, i.e. in which all
    # Haskell code is statically linked and foreign libraries and system
    # dependencies are dynamically linked. If linkstatic is false, i.e. the user
    # has requested fully dynamic linking, we must therefore add flags to make
    # sure that GHC dynamically links Haskell code too. The one exception to
    # this is when we are compiling for profiling, which currently does not play
    # nicely with dynamic linking.
    if dynamic:
        if with_profiling:
            print("WARNING: dynamic linking and profiling don't mix. Omitting -dynamic.\nSee https://ghc.haskell.org/trac/ghc/ticket/15394")
        else:
            args.add_all(["-pie", "-dynamic"])

    # When compiling with `-threaded`, GHC needs to link against
    # the pthread library when linking against static archives (.a).
    # We assume its not a problem to pass it for other cases,
    # so we just default to passing it.
    args.add("-optl-pthread")

    args.add_all(["-o", executable.path])

    # De-duplicate optl calls while preserving ordering: we want last
    # invocation of an object to remain last. That is `-optl foo -optl
    # bar -optl foo` becomes `-optl bar -optl foo`. Do this by counting
    # number of occurrences. That way we only build dict and add to args
    # directly rather than doing multiple reversals with temporary
    # lists.
    args.add_all(pkg_info_to_compile_flags(expose_packages(
        dep_info,
        lib_info = None,
        use_direct = True,
        use_my_pkg_id = None,
        custom_package_databases = None,
        version = version,
    )))

    (cc_link_libs, cc_solibs, hs_solibs) = _link_dependencies(
        hs = hs,
        dep_info = dep_info,
        dynamic = dynamic,
        binary = executable,
        args = args,
    )

    # XXX: Suppress a warning that Clang prints due to GHC automatically passing
    # "-pie" or "-no-pie" to the C compiler.
    # This is linked to https://ghc.haskell.org/trac/ghc/ticket/15319
    args.add_all([
        "-optc-Wno-unused-command-line-argument",
        "-optl-Wno-unused-command-line-argument",
    ])

    objects_dir_manifest = _create_objects_dir_manifest(
        hs,
        objects_dir,
        dynamic = dynamic,
        with_profiling = with_profiling,
    )

    extra_linker_flags_file = None
    if hs.toolchain.is_darwin:
        args.add("-optl-Wl,-headerpad_max_install_names")

        # Nixpkgs commit 3513034208a introduces -liconv in NIX_LDFLAGS on
        # Darwin. We don't currently handle NIX_LDFLAGS in any special
        # way, so a hack is to simply do what NIX_LDFLAGS is telling us we
        # should do always when using a toolchain from Nixpkgs.
        # TODO remove this gross hack.
        args.add("-liconv")

        extra_linker_flags_file = _darwin_create_extra_linker_flags_file(
            hs,
            cc,
            objects_dir,
            executable,
            dynamic,
            cc_solibs,
        )

    # GHC reads the list of objects (and any extra Darwin flags) from a
    # single parameter file.
    if extra_linker_flags_file != None:
        params_file = _merge_parameter_files(hs, objects_dir_manifest, extra_linker_flags_file)
    else:
        params_file = objects_dir_manifest

    hs.toolchain.actions.run_ghc(
        hs,
        cc,
        inputs = depset(transitive = [
            depset(extra_srcs),
            set.to_depset(dep_info.package_databases),
            set.to_depset(dep_info.dynamic_libraries),
            depset(dep_info.static_libraries),
            depset(dep_info.static_libraries_prof),
            depset([objects_dir]),
            cc_link_libs,
        ]),
        outputs = [executable],
        mnemonic = "HaskellLinkBinary",
        arguments = args,
        params_file = params_file,
    )

    return (executable, cc_solibs + hs_solibs)
def _add_external_libraries(args, ext_libs):
    """Register C library dependencies with the linker arguments.

    Emits a `-L<dir>`/`-l<name>` pair for every distinct library in
    `ext_libs`.

    Args:
      args: Args object.
      ext_libs: C library dependencies.
    """

    # Deduplicate on the library name (file name stripped of the `lib`
    # prefix and endings) to keep command lines short when the same C
    # library (e.g. `liblz4.so`) shows up through several dependencies.
    # XXX: deduplication currently happens only here.
    # Shouldnt the deduplication be applied to *all* external libraries?
    for lib in list.dedup_on(get_lib_name, ext_libs):
        args.add_all([
            "-L{0}".format(paths.dirname(lib.path)),
            # get_lib_name is computed a second time here; the added
            # clarity makes up for the repeated call.
            "-l{0}".format(get_lib_name(lib)),
        ])
def _infer_rpaths(is_darwin, target, solibs):
    """Compute the RPATH entries `target` needs to locate all of `solibs`.

    The resulting entries look like:

        $ORIGIN/../../path/to/solib/dir

    i.e. "go upwards to your runfiles directory, then descend into the
    parent folder of the solib".

    Args:
      is_darwin: Whether we're compiling on and for Darwin.
      target: File, executable or library we're linking.
      solibs: A list of Files, shared objects that the target needs.

    Returns:
      Set of strings: rpaths to add to target.
    """

    # Darwin resolves rpaths relative to the loading image via
    # @loader_path; ELF platforms use $ORIGIN.
    origin = "@loader_path" if is_darwin else "$ORIGIN"

    rpaths = set.empty()
    for solib in solibs:
        entry = create_rpath_entry(
            binary = target,
            dependency = solib,
            keep_filename = False,
            prefix = origin,
        )
        set.mutable_insert(rpaths, entry)
    return rpaths
def _so_extension(hs):
    """File extension for shared libraries on the current platform.

    Args:
      hs: Haskell rule context.

    Returns:
      string: "dylib" on Darwin, "so" elsewhere (no leading dot).
    """
    if hs.toolchain.is_darwin:
        return "dylib"
    return "so"
def link_library_static(hs, cc, dep_info, objects_dir, my_pkg_id, with_profiling):
    """Link a static library for the package using given object files.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies (provides `ar`).
      dep_info: HaskellInfo provider of the dependencies.
      objects_dir: Directory containing the object files to archive.
      my_pkg_id: The package id; determines the library file name.
      with_profiling: Bool: whether profiling objects are being archived.

    Returns:
      File: Produced static library.
    """
    static_library = hs.actions.declare_file(
        "lib{0}.a".format(pkg_id.library_name(hs, my_pkg_id, prof_suffix = with_profiling)),
    )
    objects_dir_manifest = _create_objects_dir_manifest(
        hs,
        objects_dir,
        dynamic = False,
        with_profiling = with_profiling,
    )
    args = hs.actions.args()
    inputs = [objects_dir, objects_dir_manifest] + cc.files

    if hs.toolchain.is_darwin:
        # On Darwin, ar doesn't support params files.
        args.add_all([
            static_library,
            objects_dir_manifest.path,
        ])

        # TODO Get ar location from the CC toolchain. This is
        # complicated by the fact that the CC toolchain does not
        # always use ar, and libtool has an entirely different CLI.
        # See https://github.com/bazelbuild/bazel/issues/5127
        #
        # The manifest is expanded inline ($(< $2)) since ar cannot read
        # it via a params file here.
        hs.actions.run_shell(
            inputs = inputs,
            outputs = [static_library],
            mnemonic = "HaskellLinkStaticLibrary",
            command = "{ar} qc $1 $(< $2)".format(ar = cc.tools.ar),
            arguments = [args],

            # Use the default macosx toolchain
            env = {"SDKROOT": "macosx"},
        )
    else:
        # Elsewhere ar accepts the manifest directly via @file syntax.
        args.add_all([
            "qc",
            static_library,
            "@" + objects_dir_manifest.path,
        ])
        hs.actions.run(
            inputs = inputs,
            outputs = [static_library],
            mnemonic = "HaskellLinkStaticLibrary",
            executable = cc.tools.ar,
            arguments = [args],
        )

    return static_library
def link_library_dynamic(hs, cc, dep_info, extra_srcs, objects_dir, my_pkg_id):
    """Link a dynamic library for the package using given object files.

    Args:
      hs: Haskell context.
      cc: CcInteropInfo, information about C dependencies.
      dep_info: HaskellInfo provider of the dependencies.
      extra_srcs: Extra (non-Haskell) source inputs for the link action.
      objects_dir: Directory containing the (dynamic) object files.
      my_pkg_id: The package id; determines the library file name.

    Returns:
      File: Produced dynamic library.
    """
    dynamic_library = hs.actions.declare_file(
        "lib{0}-ghc{1}.{2}".format(
            pkg_id.library_name(hs, my_pkg_id),
            hs.toolchain.version,
            _so_extension(hs),
        ),
    )

    args = hs.actions.args()
    args.add_all(["-optl" + f for f in cc.linker_flags])
    args.add_all(["-shared", "-dynamic"])

    # Work around macOS linker limits. This fix has landed in GHC HEAD, but is
    # not yet in a release; plus, we still want to support older versions of
    # GHC. For details, see: https://phabricator.haskell.org/D4714
    if hs.toolchain.is_darwin:
        args.add("-optl-Wl,-dead_strip_dylibs")

    args.add_all(pkg_info_to_compile_flags(expose_packages(
        dep_info,
        lib_info = None,
        use_direct = True,
        use_my_pkg_id = None,
        custom_package_databases = None,
        version = my_pkg_id.version if my_pkg_id else None,
    )))

    (cc_link_libs, _cc_solibs, _hs_solibs) = _link_dependencies(
        hs = hs,
        dep_info = dep_info,
        dynamic = True,
        binary = dynamic_library,
        args = args,
    )

    args.add_all(["-o", dynamic_library.path])

    # Profiling not supported for dynamic libraries.
    objects_dir_manifest = _create_objects_dir_manifest(
        hs,
        objects_dir,
        dynamic = True,
        with_profiling = False,
    )

    hs.toolchain.actions.run_ghc(
        hs,
        cc,
        inputs = depset([objects_dir], transitive = [
            depset(extra_srcs),
            set.to_depset(dep_info.package_databases),
            set.to_depset(dep_info.dynamic_libraries),
            cc_link_libs,
        ]),
        outputs = [dynamic_library],
        mnemonic = "HaskellLinkDynamicLibrary",
        arguments = args,
        params_file = objects_dir_manifest,
    )

    return dynamic_library

View file

@ -0,0 +1,210 @@
"""Action for creating packages and registering them with ghc-pkg"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/path_utils.bzl", "target_unique_name")
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/path_utils.bzl", "get_lib_name")
def _get_extra_libraries(dep_info):
    """Collect search directories and names of extra library dependencies.

    Args:
      dep_info: HaskellInfo provider of the package.

    Returns:
      (dirs, libs):
        dirs: list: Library search directories for extra library dependencies.
        libs: list: Extra library dependencies.
    """
    libraries = dep_info.cc_dependencies.dynamic_linking.libraries_to_link.to_list()

    # Linking static archives is order sensitive, so library names are
    # accumulated in a list (preserving the input depset's order) while a
    # separate set takes care of deduplication. Directories are gathered in
    # a set; their order does not matter.
    names_seen = set.empty()
    names = []
    dirs = set.empty()
    for lib in libraries:
        name = get_lib_name(lib)
        if not set.is_member(names_seen, name):
            set.mutable_insert(names_seen, name)
            names.append(name)
        set.mutable_insert(dirs, lib.dirname)
    return (set.to_list(dirs), names)
def package(
        hs,
        dep_info,
        interfaces_dir,
        interfaces_dir_prof,
        static_library,
        dynamic_library,
        exposed_modules_file,
        other_modules,
        my_pkg_id,
        static_library_prof):
    """Create GHC package using ghc-pkg.

    Args:
      hs: Haskell context.
      dep_info: HaskellInfo provider of the package's dependencies.
      interfaces_dir: Directory containing interface files.
      interfaces_dir_prof: Directory containing profiling interface files,
        or None if profiling is disabled.
      static_library: Static library of the package.
      dynamic_library: Dynamic library of the package.
      exposed_modules_file: File listing the package's exposed modules.
      other_modules: List of hidden module names.
      my_pkg_id: The package id of the package being registered.
      static_library_prof: Static library compiled with profiling or None.

    Returns:
      (File, File): GHC package conf file, GHC package cache file
    """
    pkg_db_dir = pkg_id.to_string(my_pkg_id)
    conf_file = hs.actions.declare_file(
        paths.join(pkg_db_dir, "{0}.conf".format(pkg_db_dir)),
    )
    cache_file = hs.actions.declare_file("package.cache", sibling = conf_file)

    # Interface directories as seen from the installed package database
    # (${pkgroot} is expanded by GHC at lookup time).
    import_dir = paths.join(
        "${pkgroot}",
        paths.join(pkg_db_dir, "_iface"),
    )
    interfaces_dirs = [interfaces_dir]

    if interfaces_dir_prof != None:
        import_dir_prof = paths.join(
            "${pkgroot}",
            paths.join(pkg_db_dir, "_iface_prof"),
        )
        interfaces_dirs.append(interfaces_dir_prof)
    else:
        import_dir_prof = ""

    (extra_lib_dirs, extra_libs) = _get_extra_libraries(dep_info)

    # Key/value pairs for the package registration file, in InstalledPackageInfo
    # format. Empty values are filtered out when the file is written below.
    metadata_entries = {
        "name": my_pkg_id.name,
        "version": my_pkg_id.version,
        "id": pkg_id.to_string(my_pkg_id),
        "key": pkg_id.to_string(my_pkg_id),
        "exposed": "True",
        "hidden-modules": " ".join(other_modules),
        "import-dirs": " ".join([import_dir, import_dir_prof]),
        "library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
        "dynamic-library-dirs": " ".join(["${pkgroot}"] + extra_lib_dirs),
        "hs-libraries": pkg_id.library_name(hs, my_pkg_id),
        "extra-libraries": " ".join(extra_libs),
        "depends": ", ".join(
            # Prebuilt dependencies are added further down, since their
            # package-ids are not available as strings but in build outputs.
            set.to_list(dep_info.package_ids),
        ),
    }

    # Create a file from which ghc-pkg will create the actual package
    # from. List of exposed modules generated below.
    metadata_file = hs.actions.declare_file(target_unique_name(hs, "metadata"))
    hs.actions.write(
        output = metadata_file,
        content = "\n".join([
            "{0}: {1}".format(k, v)
            for k, v in metadata_entries.items()
            if v
        ]) + "\n",
    )

    # Collect the package id files of all prebuilt dependencies.
    prebuilt_deps_id_files = [
        dep.id_file
        for dep in set.to_list(dep_info.prebuilt_dependencies)
    ]

    # Combine exposed modules and other metadata to form the package
    # configuration file.
    prebuilt_deps_args = hs.actions.args()
    prebuilt_deps_args.add_all([f.path for f in prebuilt_deps_id_files])
    prebuilt_deps_args.use_param_file("%s", use_always = True)
    prebuilt_deps_args.set_param_file_format("multiline")

    # $1 = metadata file, $2 = exposed modules file, $3 = params file with
    # prebuilt dependency id files, $4 = output conf file.
    # NOTE(review): the embedded shell comment reads "We do use 'readarray'"
    # but the loop below is the *replacement* for readarray (for older bash)
    # — the comment presumably means "do not".
    hs.actions.run_shell(
        inputs = [metadata_file, exposed_modules_file] + prebuilt_deps_id_files,
        outputs = [conf_file],
        command = """
        cat $1 > $4
        echo "exposed-modules: `cat $2`" >> $4

        # this is equivalent to 'readarray'. We do use 'readarray' in order to
        # support older bash versions.
        while IFS= read -r line; do deps_id_files+=("$line"); done < $3

        if [ ${#deps_id_files[@]} -eq 0 ]; then
            deps=""
        else
            deps=$(cat "${deps_id_files[@]}" | tr '\n' " ")
        fi
        echo "depends: $deps" >> $4
        """,
        arguments = [
            metadata_file.path,
            exposed_modules_file.path,
            prebuilt_deps_args,
            conf_file.path,
        ],
        use_default_shell_env = True,
    )

    # Make the call to ghc-pkg and use the package configuration file
    package_path = ":".join([c.dirname for c in set.to_list(dep_info.package_databases)]) + ":"
    hs.actions.run(
        inputs = depset(transitive = [
            set.to_depset(dep_info.package_databases),
            depset(interfaces_dirs),
            depset([
                input
                for input in [
                    static_library,
                    conf_file,
                    dynamic_library,
                    static_library_prof,
                ]
                if input
            ]),
        ]),
        outputs = [cache_file],
        env = {
            "GHC_PACKAGE_PATH": package_path,
        },
        mnemonic = "HaskellRegisterPackage",
        progress_message = "HaskellRegisterPackage {}".format(hs.label),
        executable = hs.tools.ghc_pkg,
        # Registration of a new package consists in,
        #
        # 1. copying the registration file into the package db,
        # 2. performing some validation on the registration file content,
        # 3. recaching, i.e. regenerating the package db cache file.
        #
        # Normally, this is all done by `ghc-pkg register`. But in our
        # case, `ghc-pkg register` is painful, because the validation
        # it performs is slow, somewhat redundant but especially, too
        # strict (see e.g.
        # https://ghc.haskell.org/trac/ghc/ticket/15478). So we do (1)
        # and (3) manually, by copying then calling `ghc-pkg recache`
        # directly.
        #
        # The downside is that we do lose the few validations that
        # `ghc-pkg register` was doing that was useful. e.g. when
        # reexporting modules, validation checks that the source
        # module does exist.
        #
        # TODO Go back to using `ghc-pkg register`. Blocked by
        # https://ghc.haskell.org/trac/ghc/ticket/15478
        arguments = [
            "recache",
            "--package-db={0}".format(conf_file.dirname),
            "-v0",
            "--no-expand-pkgroot",
        ],
        # XXX: Seems required for this to work on Windows
        use_default_shell_env = True,
    )

    return conf_file, cache_file

View file

@ -0,0 +1,175 @@
"""GHCi REPL support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load(
":private/set.bzl",
"set",
)
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_repl(
        hs,
        ghci_script,
        ghci_repl_wrapper,
        user_compile_flags,
        repl_ghci_args,
        hs_info,
        output,
        package_databases,
        version,
        lib_info = None):
    """Build REPL script.

    Generates a GHCi startup script and a shell wrapper that launches
    GHCi with the package databases, import directories and C libraries
    of the target in scope, then symlinks the wrapper to `output`.

    Args:
      hs: Haskell context.
      ghci_script: File; template for the GHCi startup script
        (receives {ADD_SOURCES} and {COMMANDS}).
      ghci_repl_wrapper: File; template for the shell wrapper
        (receives {ENV}, {TOOL}, {ARGS}).
      user_compile_flags: Compiler flags from the rule's attributes.
      repl_ghci_args: Extra GHCi arguments from the rule's attributes.
      hs_info: HaskellInfo.
      output: File; where the final REPL script is symlinked.
      package_databases: package caches excluding the cache file of the package
        we're creating a REPL for.
      version: Package version, forwarded to package exposure.
      lib_info: If we're building REPL for a library target, pass
        HaskellLibraryInfo here, otherwise it should be None.

    Returns:
      None.
    """

    # The base and directory packages are necessary for the GHCi script we use
    # (loads source files and brings in scope the corresponding modules).
    args = ["-package", "base", "-package", "directory"]

    pkg_ghc_info = expose_packages(
        hs_info,
        lib_info,
        use_direct = False,
        use_my_pkg_id = None,
        custom_package_databases = package_databases,
        version = version,
    )
    args += pkg_info_to_compile_flags(pkg_ghc_info)

    # For a library target, make its import directories visible to GHCi
    # and remember them for the ghc_info file below.
    lib_imports = []
    if lib_info != None:
        for idir in set.to_list(hs_info.import_dirs):
            args += ["-i{0}".format(idir)]
            lib_imports.append(idir)

    link_ctx = hs_info.cc_dependencies.dynamic_linking
    libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()

    # External C libraries that we need to make available to the REPL.
    libraries = link_libraries(libs_to_link, args)

    # Transitive library dependencies to have in runfiles.
    (library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
        hs,
        hs_info.transitive_cc_dependencies,
        path_prefix = "$RULES_HASKELL_EXEC_ROOT",
    )
    library_path = [paths.dirname(lib.path) for lib in library_deps]
    ld_library_path = [paths.dirname(lib.path) for lib in ld_library_deps]

    repl_file = hs.actions.declare_file(target_unique_name(hs, "repl"))

    # Source files to load on startup; substituted for {ADD_SOURCES} in the
    # GHCi script template (the "*" prefix is GHCi's :add syntax).
    add_sources = ["*" + f.path for f in set.to_list(hs_info.source_files)]

    ghci_repl_script = hs.actions.declare_file(
        target_unique_name(hs, "ghci-repl-script"),
    )
    hs.actions.expand_template(
        template = ghci_script,
        output = ghci_repl_script,
        substitutions = {
            "{ADD_SOURCES}": " ".join(add_sources),
            "{COMMANDS}": "",
        },
    )

    # Extra arguments.
    # `compiler flags` is the default set of arguments for the repl,
    # augmented by `repl_ghci_args`.
    # The ordering is important, first compiler flags (from toolchain
    # and local rule), then from `repl_ghci_args`. This way the more
    # specific arguments are listed last, and then have more priority in
    # GHC.
    # Note that most flags for GHCI do have their negative value, so a
    # negative flag in `repl_ghci_args` can disable a positive flag set
    # in `user_compile_flags`, such as `-XNoOverloadedStrings` will disable
    # `-XOverloadedStrings`.
    args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args + repl_ghci_args

    hs.actions.expand_template(
        template = ghci_repl_wrapper,
        output = repl_file,
        substitutions = {
            "{ENV}": render_env(ghc_env),
            "{TOOL}": hs.tools.ghci.path,
            "{ARGS}": " ".join(
                [
                    "-ghci-script",
                    paths.join("$RULES_HASKELL_EXEC_ROOT", ghci_repl_script.path),
                ] + [
                    shell.quote(a)
                    for a in args
                ],
            ),
        },
        is_executable = True,
    )

    # Record everything the REPL was constructed from, for tooling that
    # wants to replicate the GHC invocation (written as JSON below).
    ghc_info = struct(
        has_version = pkg_ghc_info.has_version,
        library_path = library_path,
        ld_library_path = ld_library_path,
        packages = pkg_ghc_info.packages,
        package_ids = pkg_ghc_info.package_ids,
        package_dbs = pkg_ghc_info.package_dbs,
        lib_imports = lib_imports,
        libraries = libraries,
        execs = struct(
            ghc = hs.tools.ghc.path,
            ghci = hs.tools.ghci.path,
            runghc = hs.tools.runghc.path,
        ),
        flags = struct(
            compiler = user_compile_flags,
            toolchain_compiler = hs.toolchain.compiler_flags,
            repl = repl_ghci_args,
            toolchain_repl = hs.toolchain.repl_ghci_args,
        ),
    )
    ghc_info_file = hs.actions.declare_file(
        target_unique_name(hs, "ghc-info"),
    )
    hs.actions.write(
        output = ghc_info_file,
        content = ghc_info.to_json(),
    )

    # XXX We create a symlink here because we need to force
    # hs.tools.ghci and ghci_script and the best way to do that is
    # to use hs.actions.run. That action, in turn must produce
    # a result, so using ln seems to be the only sane choice.
    extra_inputs = depset(transitive = [
        depset([
            hs.tools.ghci,
            ghci_repl_script,
            repl_file,
            ghc_info_file,
        ]),
        set.to_depset(package_databases),
        depset(library_deps),
        depset(ld_library_deps),
        set.to_depset(hs_info.source_files),
    ])
    ln(hs, repl_file, output, extra_inputs)

View file

@ -0,0 +1,115 @@
"""runghc support"""
load(":private/context.bzl", "render_env")
load(":private/packages.bzl", "expose_packages", "pkg_info_to_compile_flags")
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"link_libraries",
"ln",
"target_unique_name",
)
load(
":private/set.bzl",
"set",
)
load(":providers.bzl", "get_libs_for_ghc_linker")
load("@bazel_skylib//lib:shell.bzl", "shell")
def build_haskell_runghc(
        hs,
        runghc_wrapper,
        user_compile_flags,
        extra_args,
        hs_info,
        output,
        package_databases,
        version,
        lib_info = None):
    """Build runghc script.

    Generates a shell wrapper that invokes `runghc` with the target's
    package databases, import directories and C libraries in scope, then
    symlinks the wrapper to `output`.

    Args:
      hs: Haskell context.
      runghc_wrapper: File; template for the shell wrapper
        (receives {ENV}, {TOOL}, {ARGS}).
      user_compile_flags: Compiler flags from the rule's attributes.
      extra_args: Extra arguments passed to runghc itself (not wrapped
        in --ghc-arg).
      hs_info: HaskellInfo.
      output: File; where the final runghc script is symlinked.
      package_databases: package caches excluding the cache file of the package
        we're creating a runghc for.
      version: Package version, forwarded to package exposure.
      lib_info: If we're building runghc for a library target, pass
        HaskellLibraryInfo here, otherwise it should be None.

    Returns:
      None.
    """
    args = pkg_info_to_compile_flags(expose_packages(
        hs_info,
        lib_info,
        use_direct = False,
        use_my_pkg_id = None,
        custom_package_databases = package_databases,
        version = version,
    ))

    # For a library target, make its import directories visible to GHC.
    if lib_info != None:
        for idir in set.to_list(hs_info.import_dirs):
            args += ["-i{0}".format(idir)]

    link_ctx = hs_info.cc_dependencies.dynamic_linking
    libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()

    # External C libraries that we need to make available to runghc.
    link_libraries(libs_to_link, args)

    # Transitive library dependencies to have in runfiles.
    (library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
        hs,
        hs_info.transitive_cc_dependencies,
        path_prefix = "$RULES_HASKELL_EXEC_ROOT",
    )

    runghc_file = hs.actions.declare_file(target_unique_name(hs, "runghc"))

    # Extra arguments.
    # `compiler flags` is the default set of arguments for runghc,
    # augmented by `extra_args`.
    # The ordering is important, first compiler flags (from toolchain
    # and local rule), then from `extra_args`. This way the more
    # specific arguments are listed last, and then have more priority in
    # GHC.
    # Note that most flags for GHCI do have their negative value, so a
    # negative flag in `extra_args` can disable a positive flag set
    # in `user_compile_flags`, such as `-XNoOverloadedStrings` will disable
    # `-XOverloadedStrings`.
    # NOTE(review): the toolchain's *repl* GHCi args are appended here for
    # runghc as well — confirm this sharing is intended.
    args += hs.toolchain.compiler_flags + user_compile_flags + hs.toolchain.repl_ghci_args

    # ghc args need to be wrapped up in "--ghc-arg=" when passing to runghc
    runcompile_flags = ["--ghc-arg=%s" % a for a in args]
    runcompile_flags += extra_args

    hs.actions.expand_template(
        template = runghc_wrapper,
        output = runghc_file,
        substitutions = {
            "{ENV}": render_env(ghc_env),
            "{TOOL}": hs.tools.runghc.path,
            "{ARGS}": " ".join([shell.quote(a) for a in runcompile_flags]),
        },
        is_executable = True,
    )

    # XXX We create a symlink here because we need to force
    # hs.tools.runghc and the best way to do that is
    # to use hs.actions.run. That action, in turn must produce
    # a result, so using ln seems to be the only sane choice.
    extra_inputs = depset(transitive = [
        depset([
            hs.tools.runghc,
            runghc_file,
        ]),
        set.to_depset(package_databases),
        depset(library_deps),
        depset(ld_library_deps),
        set.to_depset(hs_info.source_files),
    ])
    ln(hs, runghc_file, output, extra_inputs)

View file

@ -0,0 +1,64 @@
"""Derived context with Haskell-specific fields and methods"""
load("@bazel_skylib//lib:paths.bzl", "paths")
HaskellContext = provider()
def haskell_context(ctx, attr = None):
    """Build a HaskellContext from a rule context.

    Args:
      ctx: Rule context.
      attr: Attribute struct to read `src_strip_prefix`/`name` from;
        defaults to `ctx.attr`.

    Returns:
      HaskellContext: derived context with toolchain, tools, source
      roots, environment and output directories for Haskell actions.
    """
    toolchain = ctx.toolchains["@io_tweag_rules_haskell//haskell:toolchain"]

    if not attr:
        attr = ctx.attr

    if hasattr(attr, "src_strip_prefix"):
        src_strip_prefix = attr.src_strip_prefix
    else:
        src_strip_prefix = ""

    src_root = paths.join(
        ctx.label.workspace_root,
        ctx.label.package,
        src_strip_prefix,
    )

    env = {
        "LANG": toolchain.locale,
    }

    # LOCALE_ARCHIVE is only needed on NixOS-style setups where the
    # toolchain ships its own locale archive.
    if toolchain.locale_archive != None:
        env["LOCALE_ARCHIVE"] = toolchain.locale_archive.path

    # Aspect contexts may not carry a configuration; default coverage off.
    coverage_enabled = False
    if hasattr(ctx, "configuration"):
        coverage_enabled = ctx.configuration.coverage_enabled

    return HaskellContext(
        # Fields
        name = attr.name,
        label = ctx.label,
        toolchain = toolchain,
        tools = toolchain.tools,
        src_root = src_root,
        # Fix: join with a path separator. The previous plain string
        # concatenation produced e.g. "external/foomypkg" for targets in
        # external workspaces (workspace_root is "" in the main repo, so
        # paths.join yields the same value there).
        package_root = paths.join(ctx.label.workspace_root, ctx.label.package),
        env = env,
        mode = ctx.var["COMPILATION_MODE"],
        actions = ctx.actions,
        bin_dir = ctx.bin_dir,
        genfiles_dir = ctx.genfiles_dir,
        coverage_enabled = coverage_enabled,
    )
def render_env(env):
    """Render an environment dict as a series of shell `export` lines.

    Example:

      >>> render_env({"PATH": "foo:bar", "LANG": "lang"})
      export PATH=foo:bar
      export LANG=lang

    Args:
      env: dict mapping environment variable names to values.

    Returns:
      string: newline-separated `export NAME=value` statements.
    """
    exports = []
    for name, value in env.items():
        exports.append("export {}={}".format(name, value))
    return "\n".join(exports)

View file

@ -0,0 +1,128 @@
#!/usr/bin/env bash
# A wrapper for Haskell binaries which have been instrumented for hpc code coverage.
#
# {placeholder} tokens are substituted by the Starlark rule that
# instantiates this template (binary path, hpc path, thresholds, etc.).

# Copy-pasted from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
set -euo pipefail
if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
  if [[ -f "$0.runfiles_manifest" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
  elif [[ -f "$0.runfiles/MANIFEST" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
  elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
    export RUNFILES_DIR="$0.runfiles"
  fi
fi
if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
  source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
  source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
            "$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
else
  echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
  exit 1
fi
# --- end runfiles.bash initialization ---

# ANSI colors used for failure messages below.
ERRORCOLOR='\033[1;31m'
CLEARCOLOR='\033[0m'
binary_path=$(rlocation {binary_path})
hpc_path=$(rlocation {hpc_path})
tix_file_path={tix_file_path}
coverage_report_format={coverage_report_format}
strict_coverage_analysis={strict_coverage_analysis}
package_path={package_path}

# either of the two expected coverage metrics should be set to -1 if they're meant to be unused
expected_covered_expressions_percentage={expected_covered_expressions_percentage}
expected_uncovered_expression_count={expected_uncovered_expression_count}

# gather the hpc directories
# NOTE(review): with `set -u`, expanding an empty {mix_file_paths} array
# fails on bash < 4.4 — confirm the minimum supported bash version.
hpc_dir_args=""
mix_file_paths={mix_file_paths}
for m in "${mix_file_paths[@]}"
do
  absolute_mix_file_path=$(rlocation $m)
  hpc_parent_dir=$(dirname $absolute_mix_file_path)
  # strip everything from ".hpc" onwards, then re-append ".hpc" to get
  # the directory hpc expects via --hpcdir.
  trimmed_hpc_parent_dir=$(echo "${hpc_parent_dir%%.hpc*}")
  hpc_dir_args="$hpc_dir_args --hpcdir=$trimmed_hpc_parent_dir.hpc"
done

# gather the modules to exclude from the coverage analysis
hpc_exclude_args=""
modules_to_exclude={modules_to_exclude}
for m in "${modules_to_exclude[@]}"
do
  hpc_exclude_args="$hpc_exclude_args --exclude=$m"
done

# run the test binary, and then generate the report
# (with `set -e` above, a failing test aborts the wrapper here)
$binary_path "$@" > /dev/null 2>&1
$hpc_path report "$tix_file_path" $hpc_dir_args $hpc_exclude_args \
  --srcdir "." --srcdir "$package_path" > __hpc_coverage_report

# if we want a text report, just output the file generated in the previous step
if [ "$coverage_report_format" == "text" ]
then
  echo "Overall report"
  cat __hpc_coverage_report
fi

# check the covered expression percentage, and if it matches our expectations
if [ "$expected_covered_expressions_percentage" -ne -1 ]
then
  # the percentage is the first column of the "expressions used" line
  covered_expression_percentage=$(grep "expressions used" __hpc_coverage_report | cut -c 1-3)
  if [ "$covered_expression_percentage" -lt "$expected_covered_expressions_percentage" ]
  then
    echo -e "\n==>$ERRORCOLOR Inadequate expression coverage percentage.$CLEARCOLOR"
    echo -e "==> Expected $expected_covered_expressions_percentage%, but the actual coverage was $ERRORCOLOR$(($covered_expression_percentage))%$CLEARCOLOR.\n"
    exit 1
  elif [ "$strict_coverage_analysis" == "True" ] && [ "$covered_expression_percentage" -gt "$expected_covered_expressions_percentage" ]
  then
    # in strict mode, coverage *above* the expectation is also an error so
    # the recorded expectation never goes stale
    echo -e "\n==>$ERRORCOLOR ** BECAUSE STRICT COVERAGE ANALYSIS IS ENABLED **$CLEARCOLOR"
    echo -e "==> Your coverage percentage is now higher than expected.$CLEARCOLOR"
    echo -e "==> Expected $expected_covered_expressions_percentage% of expressions covered, but the actual value is $ERRORCOLOR$(($covered_expression_percentage))%$CLEARCOLOR."
    echo -e "==> Please increase the expected coverage percentage to match.\n"
    exit 1
  fi
fi

# check how many uncovered expressions there are, and if that number matches our expectations
if [ "$expected_uncovered_expression_count" -ne -1 ]
then
  # parse "N/M expressions used" into numerator and denominator
  coverage_numerator=$(grep "expressions used" __hpc_coverage_report | sed s:.*\(::g | cut -f1 -d "/")
  coverage_denominator=$(grep "expressions used" __hpc_coverage_report | sed s:.*/::g | cut -f1 -d ")")
  uncovered_expression_count="$(($coverage_denominator - $coverage_numerator))"
  if [ "$uncovered_expression_count" -gt "$expected_uncovered_expression_count" ]
  then
    echo -e "\n==>$ERRORCOLOR Too many uncovered expressions.$CLEARCOLOR"
    echo -e "==> Expected $expected_uncovered_expression_count uncovered expressions, but the actual count was $ERRORCOLOR$(($uncovered_expression_count))$CLEARCOLOR.\n"
    exit 1
  elif [ "$strict_coverage_analysis" == "True" ] && [ "$uncovered_expression_count" -lt "$expected_uncovered_expression_count" ]
  then
    echo -e "\n==>$ERRORCOLOR ** BECAUSE STRICT COVERAGE ANALYSIS IS ENABLED **$CLEARCOLOR"
    echo -e "==>$ERRORCOLOR Your uncovered expression count is now lower than expected.$CLEARCOLOR"
    echo -e "==> Expected $expected_uncovered_expression_count uncovered expressions, but there is $ERRORCOLOR$(($uncovered_expression_count))$CLEARCOLOR."
    echo -e "==> Please lower the expected uncovered expression count to match.\n"
    exit 1
  fi
fi

# if we want an html report, run the hpc binary again with the "markup" command,
# and feed its generated files into stdout, wrapped in XML tags
if [ "$coverage_report_format" == "html" ]
then
  $hpc_path markup "$tix_file_path" $hpc_dir_args $hpc_exclude_args \
    --srcdir "." --srcdir "$package_path" --destdir=hpc_out > /dev/null 2>&1
  cd hpc_out
  echo "COVERAGE REPORT BELOW"
  echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
  for file in *.html **/*.hs.html; do
    [ -e "$file" ] || continue
    echo "<coverage-report-part name=\"$file\">"
    echo '<![CDATA['
    cat $file
    echo ']]>'
    echo "</coverage-report-part>"
  done
  echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
fi

View file

@ -0,0 +1,222 @@
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellCcInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellPrebuiltPackageInfo",
"empty_HaskellCcInfo",
"merge_HaskellCcInfo",
)
load(
":private/path_utils.bzl",
"get_lib_name",
"is_shared_library",
"is_static_library",
"ln",
)
load(":private/set.bzl", "set")
def _cc_get_static_lib(lib_info):
"""Return the library to use in static linking mode.
This returns the first available library artifact in the following order:
- static_library
- pic_static_library
- dynamic_library
- interface_library
Args:
lib_info: LibraryToLink provider.
Returns:
File: The library to link against in static mode.
"""
if lib_info.static_library:
return lib_info.static_library
elif lib_info.pic_static_library:
return lib_info.pic_static_library
elif lib_info.dynamic_library:
return lib_info.dynamic_library
else:
return lib_info.interface_library
def _cc_get_dynamic_lib(lib_info):
"""Return the library to use in dynamic linking mode.
This returns the first available library artifact in the following order:
- dynamic_library
- interface_library
- pic_static_library
- static_library
Args:
lib_info: LibraryToLink provider.
Returns:
File: The library to link against in dynamic mode.
"""
if lib_info.dynamic_library:
return lib_info.dynamic_library
elif lib_info.interface_library:
return lib_info.interface_library
elif lib_info.pic_static_library:
return lib_info.pic_static_library
else:
return lib_info.static_library
def _HaskellCcInfo_from_CcInfo(ctx, cc_info):
    """Convert a `CcInfo` provider into a `HaskellCcInfo` provider.

    Splits the C/C++ libraries into a static-linking view and a
    dynamic-linking view, renaming static archives whose library name
    does not match the mangled name of the corresponding dynamic library.

    Args:
      ctx: Rule context.
      cc_info: CcInfo provider of a C/C++ dependency.

    Returns:
      HaskellCcInfo: with `static_linking` and `dynamic_linking` structs,
      each carrying `libraries_to_link`, `dynamic_libraries_for_runtime`
      and `user_link_flags` depsets (topological order).
    """
    libs_to_link = cc_info.linking_context.libraries_to_link
    static_libs_to_link = []
    dynamic_libs_to_link = []
    static_libs_for_runtime = []
    dynamic_libs_for_runtime = []
    for l in libs_to_link:
        _static_lib = _cc_get_static_lib(l)
        dynamic_lib = _cc_get_dynamic_lib(l)

        # Bazel itself only mangles dynamic libraries, not static libraries.
        # However, we need the library name of the static and dynamic version
        # of a library to match so that we can refer to both with one entry in
        # the package configuration file. Here we rename any static archives
        # with mismatching mangled dynamic library name.
        static_name = get_lib_name(_static_lib)
        dynamic_name = get_lib_name(dynamic_lib)
        if static_name != dynamic_name:
            ext = _static_lib.extension
            static_lib = ctx.actions.declare_file(
                "lib%s.%s" % (dynamic_name, ext),
            )
            ln(ctx, _static_lib, static_lib)
        else:
            static_lib = _static_lib

        static_libs_to_link.append(static_lib)

        # NOTE(review): the "static" artifact can itself be a shared library
        # (see _cc_get_static_lib's fallback order), hence this check.
        if is_shared_library(static_lib):
            static_libs_for_runtime.append(static_lib)
        dynamic_libs_to_link.append(dynamic_lib)
        if is_shared_library(dynamic_lib):
            dynamic_libs_for_runtime.append(dynamic_lib)

    return HaskellCcInfo(
        static_linking = struct(
            libraries_to_link = depset(
                direct = static_libs_to_link,
                order = "topological",
            ),
            dynamic_libraries_for_runtime = depset(
                direct = static_libs_for_runtime,
                order = "topological",
            ),
            user_link_flags = depset(
                direct = cc_info.linking_context.user_link_flags,
                order = "topological",
            ),
        ),
        dynamic_linking = struct(
            libraries_to_link = depset(
                direct = dynamic_libs_to_link,
                order = "topological",
            ),
            dynamic_libraries_for_runtime = depset(
                direct = dynamic_libs_for_runtime,
                order = "topological",
            ),
            user_link_flags = depset(
                direct = cc_info.linking_context.user_link_flags,
                order = "topological",
            ),
        ),
    )
def gather_dep_info(ctx, deps):
    """Collapse dependencies into a single `HaskellInfo`.

    Folds every entry of `deps` into an accumulator, distinguishing
    Haskell library targets, prebuilt (GHC-shipped) packages, and plain
    C/C++ (`CcInfo`-only) dependencies.

    Note that the field `prebuilt_dependencies` also includes
    prebuilt_dependencies of current target.

    Args:
      ctx: Rule context.
      deps: deps attribute.

    Returns:
      HaskellInfo: Unified information about all dependencies.
    """

    # Empty accumulator; each branch below rebuilds it immutably (although
    # the set.mutable_* helpers update the underlying sets in place).
    acc = HaskellInfo(
        package_ids = set.empty(),
        package_databases = set.empty(),
        version_macros = set.empty(),
        static_libraries = [],
        static_libraries_prof = [],
        dynamic_libraries = set.empty(),
        interface_dirs = set.empty(),
        prebuilt_dependencies = set.empty(),
        direct_prebuilt_deps = set.empty(),
        cc_dependencies = empty_HaskellCcInfo(),
        transitive_cc_dependencies = empty_HaskellCcInfo(),
    )

    for dep in deps:
        if HaskellInfo in dep:
            binfo = dep[HaskellInfo]
            package_ids = acc.package_ids

            # A Haskell target without HaskellLibraryInfo is a binary;
            # depending on one is an error.
            if HaskellLibraryInfo not in dep:
                fail("Target {0} cannot depend on binary".format(ctx.attr.name))
            if HaskellLibraryInfo in dep:
                set.mutable_insert(package_ids, dep[HaskellLibraryInfo].package_id)
            acc = HaskellInfo(
                package_ids = package_ids,
                package_databases = set.mutable_union(acc.package_databases, binfo.package_databases),
                version_macros = set.mutable_union(acc.version_macros, binfo.version_macros),
                static_libraries = acc.static_libraries + binfo.static_libraries,
                static_libraries_prof = acc.static_libraries_prof + binfo.static_libraries_prof,
                dynamic_libraries = set.mutable_union(acc.dynamic_libraries, binfo.dynamic_libraries),
                interface_dirs = set.mutable_union(acc.interface_dirs, binfo.interface_dirs),
                prebuilt_dependencies = set.mutable_union(acc.prebuilt_dependencies, binfo.prebuilt_dependencies),
                direct_prebuilt_deps = acc.direct_prebuilt_deps,
                cc_dependencies = acc.cc_dependencies,
                transitive_cc_dependencies = merge_HaskellCcInfo(acc.transitive_cc_dependencies, binfo.transitive_cc_dependencies),
            )
        elif HaskellPrebuiltPackageInfo in dep:
            # Prebuilt package: record it (directly and transitively) and
            # its version-macros file; it carries no library artifacts here.
            pkg = dep[HaskellPrebuiltPackageInfo]
            acc = HaskellInfo(
                package_ids = acc.package_ids,
                package_databases = acc.package_databases,
                version_macros = set.mutable_insert(acc.version_macros, pkg.version_macros_file),
                static_libraries = acc.static_libraries,
                static_libraries_prof = acc.static_libraries_prof,
                dynamic_libraries = acc.dynamic_libraries,
                interface_dirs = acc.interface_dirs,
                prebuilt_dependencies = set.mutable_insert(acc.prebuilt_dependencies, pkg),
                direct_prebuilt_deps = set.mutable_insert(acc.direct_prebuilt_deps, pkg),
                cc_dependencies = acc.cc_dependencies,
                transitive_cc_dependencies = acc.transitive_cc_dependencies,
            )
        elif CcInfo in dep and HaskellInfo not in dep:
            # The final link of a binary must include all static libraries we
            # depend on, including transitives ones. Theses libs are provided
            # in the `CcInfo` provider.
            hs_cc_info = _HaskellCcInfo_from_CcInfo(ctx, dep[CcInfo])
            acc = HaskellInfo(
                package_ids = acc.package_ids,
                package_databases = acc.package_databases,
                version_macros = acc.version_macros,
                static_libraries = acc.static_libraries,
                static_libraries_prof = acc.static_libraries_prof,
                dynamic_libraries = acc.dynamic_libraries,
                interface_dirs = acc.interface_dirs,
                prebuilt_dependencies = acc.prebuilt_dependencies,
                direct_prebuilt_deps = acc.direct_prebuilt_deps,
                cc_dependencies = merge_HaskellCcInfo(
                    acc.cc_dependencies,
                    hs_cc_info,
                ),
                transitive_cc_dependencies = merge_HaskellCcInfo(
                    acc.transitive_cc_dependencies,
                    hs_cc_info,
                ),
            )

    return acc

View file

@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# Usage: ghci_repl_wrapper.sh <ARGS>
#
# {TOOL}, {ARGS} and {ENV} are template placeholders substituted by the
# Starlark rule that instantiates this script.

# this variable is set by `bazel run`
if [ "$BUILD_WORKSPACE_DIRECTORY" = "" ]
then
    cat <<EOF
It looks like you are trying to invoke the REPL incorrectly.
We only support calling the repl script with

    $ bazel run <target>

for now.

If you are on bazel < 0.15 you must invoke as follows:

    $ bazel run --direct_run <target>
EOF
    exit 1
fi

# Derived from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
if [[ -z "$RUNFILES_DIR" ]]; then
  if [[ -d "$0.runfiles" ]]; then
    export RUNFILES_DIR="$0.runfiles"
  fi
fi
if [[ -z "$RUNFILES_MANIFEST_FILE" ]]; then
  if [[ -f "$0.runfiles_manifest" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
  elif [[ -f "$0.runfiles/MANIFEST" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
  fi
fi

# GHCi script and libraries are loaded relative to workspace directory.
# bazel run //some:target@repl will be executed from the workspace directory.
# bazel run //some:haskell_repl will be executed from its execroot.
# Explicitly change into the workspace root in that case.
cd "$BUILD_WORKSPACE_DIRECTORY"

# This is a workaround for https://github.com/bazelbuild/bazel/issues/5506
# and also for the fact that REPL script relies on so-called “convenience
# links” and the names of those links are controlled by the --symlink_prefix
# option, which can be set by the user to something unpredictable.
#
# It seems that we can't locate the files of interest/build outputs in
# general. However, due to “internal issues” in Bazel mentioned e.g.
# https://github.com/bazelbuild/bazel/issues/3796, the directory bazel-out
# is always created under the workspace directory. We exploit this to get
# location of exec root reliably and then prefix locations of various
# components, such as shared libraries with that exec root.
RULES_HASKELL_EXEC_ROOT=$(dirname $(readlink ${BUILD_WORKSPACE_DIRECTORY}/bazel-out))
TOOL_LOCATION="$RULES_HASKELL_EXEC_ROOT/{TOOL}"

# Exports rendered from the GHC linker environment (may be empty).
{ENV}
"$TOOL_LOCATION" {ARGS} "$@"

View file

@ -0,0 +1,49 @@
#!/usr/bin/env bash
#
# Usage: haddock-wrapper.sh <PREBUILD_DEPS_FILE> <HADDOCK_ARGS>
#
# %{env}, %{ghc-pkg} and %{haddock} are template placeholders substituted
# when this script is instantiated.

set -eo pipefail

%{env}

PREBUILT_DEPS_FILE=$1
shift

extra_args=()

# For every prebuilt dependency, locate its haddock interface and HTML
# docs via ghc-pkg, and pass them to haddock with --read-interface.
for pkg in $(< $PREBUILT_DEPS_FILE)
do
    # Assumption: the `haddock-interfaces` field always only contains
    # exactly one file name. This seems to hold in practice, though the
    # ghc documentation defines it as:
    # > (string list) A list of filenames containing Haddock interface files
    # > (.haddock files) for this package.
    # If there were more than one file, going by the output for the `depends`,
    # the file names would be separated by a space character.
    # https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/packages.html#installedpackageinfo-a-package-specification
    haddock_interfaces=$(%{ghc-pkg} --simple-output field $pkg haddock-interfaces)
    haddock_html=$(%{ghc-pkg} --simple-output field $pkg haddock-html)

    # Sometimes the referenced `.haddock` file does not exist
    # (e.g. for `nixpkgs.haskellPackages` deps with haddock disabled).
    # In that case, skip this package with a warning.
    if [[ -f "$haddock_interfaces" ]]
    then
        # TODO: link source code,
        # `--read-interface=$haddock_html,$pkg_src,$haddock_interfaces
        # https://haskell-haddock.readthedocs.io/en/latest/invoking.html#cmdoption-read-interface
        extra_args+=("--read-interface=$haddock_html,$haddock_interfaces")
    else
        echo "Warning: haddock missing for package $pkg" 1>&2
    fi
done

# BSD and GNU mktemp are very different; attempt GNU first
TEMP=$(mktemp -d 2>/dev/null || mktemp -d -t 'haddock_wrapper')
# The trap is registered before cleanup is defined; bash resolves the
# handler name at signal-delivery time, so this is safe.
trap cleanup 1 2 3 6
# NOTE(review): rmdir only removes an *empty* directory — if haddock
# leaves files behind in $TEMP, cleanup fails (and aborts under set -e);
# confirm whether `rm -r` was intended.
cleanup() { rmdir "$TEMP"; }
# XXX Override TMPDIR to prevent race conditions on certain platforms.
# This is a workaround for
# https://github.com/haskell/haddock/issues/894.
TMPDIR=$TEMP %{haddock} "${extra_args[@]}" "$@"
cleanup

View file

@ -0,0 +1,668 @@
"""Implementation of core Haskell rules"""
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"C2hsLibraryInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellPrebuiltPackageInfo",
)
load(":cc.bzl", "cc_interop_info")
load(
":private/actions/link.bzl",
"link_binary",
"link_library_dynamic",
"link_library_static",
)
load(":private/actions/package.bzl", "package")
load(":private/actions/repl.bzl", "build_haskell_repl")
load(":private/actions/runghc.bzl", "build_haskell_runghc")
load(":private/context.bzl", "haskell_context")
load(":private/dependencies.bzl", "gather_dep_info")
load(":private/java.bzl", "java_interop_info")
load(":private/mode.bzl", "is_profiling_enabled")
load(
":private/path_utils.bzl",
"ln",
"match_label",
"parse_pattern",
"target_unique_name",
)
load(":private/pkg_id.bzl", "pkg_id")
load(":private/set.bzl", "set")
load(":private/version_macros.bzl", "generate_version_macros")
load(":providers.bzl", "GhcPluginInfo", "HaskellCoverageInfo")
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:collections.bzl", "collections")
load("@bazel_skylib//lib:shell.bzl", "shell")
def _prepare_srcs(srcs):
srcs_files = []
import_dir_map = {}
for src in srcs:
# If it has the "files" attribute, it must be a Target
if hasattr(src, "files"):
if C2hsLibraryInfo in src:
srcs_files += src.files.to_list()
for f in src.files.to_list():
import_dir_map[f] = src[C2hsLibraryInfo].import_dir
else:
srcs_files += src.files.to_list()
# otherwise it's just a file
else:
srcs_files.append(src)
return srcs_files, import_dir_map
def haskell_test_impl(ctx):
    """Implementation of the `haskell_test` rule.

    Delegates to the shared binary implementation with `is_test = True`,
    which additionally enables coverage handling when coverage collection
    is turned on.

    Args:
      ctx: Rule context.

    Returns:
      List of providers for the test executable.
    """
    return _haskell_binary_common_impl(ctx, is_test = True)
def haskell_binary_impl(ctx):
    """Implementation of the `haskell_binary` rule.

    Delegates to the shared binary implementation with `is_test = False`
    (no coverage handling).

    Args:
      ctx: Rule context.

    Returns:
      List of providers for the binary executable.
    """
    return _haskell_binary_common_impl(ctx, is_test = False)
def _should_inspect_coverage(ctx, hs, is_test):
return hs.coverage_enabled and is_test
def _coverage_enabled_for_target(coverage_source_patterns, label):
for pat in coverage_source_patterns:
if match_label(pat, label):
return True
return False
# Mix files refer to genfile srcs including their root. Therefore, we
# must condition the src filepaths passed in for coverage to match.
def _condition_coverage_src(hs, src):
if not src.path.startswith(hs.genfiles_dir.path):
return src
""" Genfiles have the genfile directory as part of their path,
so declaring a file with the sample path actually makes the new
file double-qualified by the genfile directory.
This is necessary because mix files capture the genfile
path before compilation, and then expect those files to be
qualified by the genfile directory when `hpc report` or
`hpc markup` are used. But, genfiles included as runfiles
are no longer qualified. So, double-qualifying them results in
only one level of qualification as runfiles.
"""
conditioned_src = hs.actions.declare_file(src.path)
hs.actions.run_shell(
inputs = [src],
outputs = [conditioned_src],
arguments = [
src.path,
conditioned_src.path,
],
command = """
mkdir -p $(dirname "$2") && cp "$1" "$2"
""",
)
return conditioned_src
def _haskell_binary_common_impl(ctx, is_test):
    """Shared implementation of `haskell_binary` and `haskell_test`.

    Compiles the sources (twice when profiling is enabled), links the
    binary, generates REPL and runghc scripts, and — for tests with
    coverage enabled — wraps the binary in a coverage-reporting script.

    Args:
      ctx: Rule context.
      is_test: Whether the calling rule is a test rule.

    Returns:
      List of providers: [HaskellInfo, CcInfo, DefaultInfo].
    """
    hs = haskell_context(ctx)
    dep_info = gather_dep_info(ctx, ctx.attr.deps)
    plugin_dep_info = gather_dep_info(
        ctx,
        [dep for plugin in ctx.attr.plugins for dep in plugin[GhcPluginInfo].deps],
    )

    # Add any interop info for other languages.
    cc = cc_interop_info(ctx)
    java = java_interop_info(ctx)

    with_profiling = is_profiling_enabled(hs)
    srcs_files, import_dir_map = _prepare_srcs(ctx.attr.srcs)
    inspect_coverage = _should_inspect_coverage(ctx, hs, is_test)

    # Non-profiling compilation; also performed when profiling is on
    # (see the TH note on the profiling compilation below).
    c = hs.toolchain.actions.compile_binary(
        hs,
        cc,
        java,
        dep_info,
        plugin_dep_info,
        srcs = srcs_files,
        ls_modules = ctx.executable._ls_modules,
        import_dir_map = import_dir_map,
        extra_srcs = depset(ctx.files.extra_srcs),
        user_compile_flags = ctx.attr.compiler_flags,
        dynamic = False if hs.toolchain.is_windows else not ctx.attr.linkstatic,
        with_profiling = False,
        main_function = ctx.attr.main_function,
        version = ctx.attr.version,
        inspect_coverage = inspect_coverage,
        plugins = ctx.attr.plugins,
    )

    # gather intermediary code coverage instrumentation data
    coverage_data = c.coverage_data
    for dep in ctx.attr.deps:
        if HaskellCoverageInfo in dep:
            coverage_data += dep[HaskellCoverageInfo].coverage_data

    c_p = None
    if with_profiling:
        c_p = hs.toolchain.actions.compile_binary(
            hs,
            cc,
            java,
            dep_info,
            plugin_dep_info,
            srcs = srcs_files,
            ls_modules = ctx.executable._ls_modules,
            import_dir_map = import_dir_map,
            # NOTE We must make the object files compiled without profiling
            # available to this step for TH to work, presumably because GHC is
            # linked against RTS without profiling.
            extra_srcs = depset(transitive = [
                depset(ctx.files.extra_srcs),
                depset([c.objects_dir]),
            ]),
            user_compile_flags = ctx.attr.compiler_flags,
            # NOTE We can't have profiling and dynamic code at the
            # same time, see:
            # https://ghc.haskell.org/trac/ghc/ticket/15394
            dynamic = False,
            with_profiling = True,
            main_function = ctx.attr.main_function,
            version = ctx.attr.version,
            plugins = ctx.attr.plugins,
        )

    # Link from the profiling objects when profiling, otherwise the
    # vanilla ones.
    (binary, solibs) = link_binary(
        hs,
        cc,
        dep_info,
        ctx.files.extra_srcs,
        ctx.attr.compiler_flags,
        c_p.objects_dir if with_profiling else c.objects_dir,
        dynamic = False if hs.toolchain.is_windows else not ctx.attr.linkstatic,
        with_profiling = with_profiling,
        version = ctx.attr.version,
    )

    hs_info = HaskellInfo(
        package_ids = dep_info.package_ids,
        package_databases = dep_info.package_databases,
        version_macros = set.empty(),
        source_files = c.source_files,
        extra_source_files = c.extra_source_files,
        import_dirs = c.import_dirs,
        static_libraries = dep_info.static_libraries,
        static_libraries_prof = dep_info.static_libraries_prof,
        dynamic_libraries = dep_info.dynamic_libraries,
        interface_dirs = dep_info.interface_dirs,
        compile_flags = c.compile_flags,
        prebuilt_dependencies = dep_info.prebuilt_dependencies,
        cc_dependencies = dep_info.cc_dependencies,
        transitive_cc_dependencies = dep_info.transitive_cc_dependencies,
    )
    cc_info = cc_common.merge_cc_infos(
        cc_infos = [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep],
    )

    target_files = depset([binary])

    build_haskell_repl(
        hs,
        ghci_script = ctx.file._ghci_script,
        ghci_repl_wrapper = ctx.file._ghci_repl_wrapper,
        user_compile_flags = ctx.attr.compiler_flags,
        repl_ghci_args = ctx.attr.repl_ghci_args,
        output = ctx.outputs.repl,
        package_databases = dep_info.package_databases,
        version = ctx.attr.version,
        hs_info = hs_info,
    )

    # XXX Temporary backwards compatibility hack. Remove eventually.
    # See https://github.com/tweag/rules_haskell/pull/460.
    ln(hs, ctx.outputs.repl, ctx.outputs.repl_deprecated)

    # NOTE(review): the GHCi wrapper template is reused for runghc — it is
    # generic over {TOOL}/{ARGS}; confirm this reuse is intended.
    build_haskell_runghc(
        hs,
        runghc_wrapper = ctx.file._ghci_repl_wrapper,
        extra_args = ctx.attr.runcompile_flags,
        user_compile_flags = ctx.attr.compiler_flags,
        output = ctx.outputs.runghc,
        package_databases = dep_info.package_databases,
        version = ctx.attr.version,
        hs_info = hs_info,
    )

    executable = binary
    extra_runfiles = []

    # For coverage-enabled tests, swap the executable for a wrapper script
    # that runs the instrumented binary and post-processes with hpc.
    if inspect_coverage:
        binary_path = paths.join(ctx.workspace_name, binary.short_path)
        hpc_path = paths.join(ctx.workspace_name, hs.toolchain.tools.hpc.short_path)
        tix_file_path = hs.label.name + ".tix"
        mix_file_paths = [
            paths.join(ctx.workspace_name, datum.mix_file.short_path)
            for datum in coverage_data
        ]
        mix_file_paths = collections.uniq(mix_file_paths)  # remove duplicates

        # find which modules to exclude from coverage analysis, by using the specified source patterns
        raw_coverage_source_patterns = ctx.attr.experimental_coverage_source_patterns
        coverage_source_patterns = [parse_pattern(ctx, pat) for pat in raw_coverage_source_patterns]
        modules_to_exclude = [paths.split_extension(datum.mix_file.basename)[0] for datum in coverage_data if not _coverage_enabled_for_target(coverage_source_patterns, datum.target_label)]
        modules_to_exclude = collections.uniq(modules_to_exclude)  # remove duplicates

        expected_covered_expressions_percentage = ctx.attr.expected_covered_expressions_percentage
        expected_uncovered_expression_count = ctx.attr.expected_uncovered_expression_count
        strict_coverage_analysis = ctx.attr.strict_coverage_analysis
        coverage_report_format = ctx.attr.coverage_report_format

        if coverage_report_format != "text" and coverage_report_format != "html":
            fail("""haskell_test attribute "coverage_report_format" must be one of "text" or "html".""")

        wrapper = hs.actions.declare_file("{}_coverage/coverage_wrapper.sh".format(ctx.label.name))
        ctx.actions.expand_template(
            template = ctx.file._coverage_wrapper_template,
            output = wrapper,
            substitutions = {
                "{binary_path}": shell.quote(binary_path),
                "{hpc_path}": shell.quote(hpc_path),
                "{tix_file_path}": shell.quote(tix_file_path),
                "{expected_covered_expressions_percentage}": str(expected_covered_expressions_percentage),
                "{expected_uncovered_expression_count}": str(expected_uncovered_expression_count),
                "{mix_file_paths}": shell.array_literal(mix_file_paths),
                "{modules_to_exclude}": shell.array_literal(modules_to_exclude),
                "{strict_coverage_analysis}": str(strict_coverage_analysis),
                "{coverage_report_format}": shell.quote(ctx.attr.coverage_report_format),
                "{package_path}": shell.quote(ctx.label.package),
            },
            is_executable = True,
        )
        executable = wrapper
        mix_runfiles = [datum.mix_file for datum in coverage_data]
        srcs_runfiles = [_condition_coverage_src(hs, datum.src_file) for datum in coverage_data]
        extra_runfiles = [
            ctx.file._bash_runfiles,
            hs.toolchain.tools.hpc,
            binary,
        ] + mix_runfiles + srcs_runfiles

    return [
        hs_info,
        cc_info,
        DefaultInfo(
            executable = executable,
            files = target_files,
            runfiles = ctx.runfiles(
                files =
                    solibs +
                    extra_runfiles,
                collect_data = True,
            ),
        ),
    ]
def haskell_library_impl(ctx):
    """Implementation of the haskell_library rule.

    Compiles the library (plus a profiling flavor in debug mode), links
    static and optionally dynamic archives, registers a GHC package, and
    returns Haskell, CC, coverage and default providers.

    Args:
      ctx: Rule context.

    Returns:
      [HaskellInfo, CcInfo, HaskellCoverageInfo, DefaultInfo, HaskellLibraryInfo]
    """
    hs = haskell_context(ctx)
    dep_info = gather_dep_info(ctx, ctx.attr.deps)

    # Plugin dependencies are gathered separately: they are needed by GHC at
    # compile time but are not part of the library's own dependency graph.
    plugin_dep_info = gather_dep_info(
        ctx,
        [dep for plugin in ctx.attr.plugins for dep in plugin[GhcPluginInfo].deps],
    )
    version = ctx.attr.version if ctx.attr.version else None
    my_pkg_id = pkg_id.new(ctx.label, version)
    with_profiling = is_profiling_enabled(hs)

    # No shared libraries on Windows; otherwise controlled by linkstatic.
    with_shared = False if hs.toolchain.is_windows else not ctx.attr.linkstatic

    # Add any interop info for other languages.
    cc = cc_interop_info(ctx)
    java = java_interop_info(ctx)
    srcs_files, import_dir_map = _prepare_srcs(ctx.attr.srcs)
    other_modules = ctx.attr.hidden_modules
    exposed_modules_reexports = _exposed_modules_reexports(ctx.attr.exports)

    # Compile the non-profiling flavor of the library.
    c = hs.toolchain.actions.compile_library(
        hs,
        cc,
        java,
        dep_info,
        plugin_dep_info,
        srcs = srcs_files,
        ls_modules = ctx.executable._ls_modules,
        other_modules = other_modules,
        exposed_modules_reexports = exposed_modules_reexports,
        import_dir_map = import_dir_map,
        extra_srcs = depset(ctx.files.extra_srcs),
        user_compile_flags = ctx.attr.compiler_flags,
        with_shared = with_shared,
        with_profiling = False,
        my_pkg_id = my_pkg_id,
        plugins = ctx.attr.plugins,
    )

    # In profiling mode, compile a second, profiling-enabled flavor.
    c_p = None
    if with_profiling:
        c_p = hs.toolchain.actions.compile_library(
            hs,
            cc,
            java,
            dep_info,
            plugin_dep_info,
            srcs = srcs_files,
            ls_modules = ctx.executable._ls_modules,
            other_modules = other_modules,
            exposed_modules_reexports = exposed_modules_reexports,
            import_dir_map = import_dir_map,
            # NOTE We must make the object files compiled without profiling
            # available to this step for TH to work, presumably because GHC is
            # linked against RTS without profiling.
            extra_srcs = depset(transitive = [
                depset(ctx.files.extra_srcs),
                depset([c.objects_dir]),
            ]),
            user_compile_flags = ctx.attr.compiler_flags,
            # NOTE We can't have profiling and dynamic code at the
            # same time, see:
            # https://ghc.haskell.org/trac/ghc/ticket/15394
            with_shared = False,
            with_profiling = True,
            my_pkg_id = my_pkg_id,
            plugins = ctx.attr.plugins,
        )

    # Link the static archive, and the dynamic one when requested.
    static_library = link_library_static(
        hs,
        cc,
        dep_info,
        c.objects_dir,
        my_pkg_id,
        with_profiling = False,
    )
    if with_shared:
        dynamic_library = link_library_dynamic(
            hs,
            cc,
            dep_info,
            depset(ctx.files.extra_srcs),
            c.objects_dir,
            my_pkg_id,
        )
        dynamic_libraries = set.insert(
            dep_info.dynamic_libraries,
            dynamic_library,
        )
    else:
        dynamic_library = None
        dynamic_libraries = dep_info.dynamic_libraries

    # Profiling flavor gets its own static archive.
    static_library_prof = None
    if with_profiling:
        static_library_prof = link_library_static(
            hs,
            cc,
            dep_info,
            c_p.objects_dir,
            my_pkg_id,
            with_profiling = True,
        )

    # Register a GHC package for this library (conf + package db cache).
    conf_file, cache_file = package(
        hs,
        dep_info,
        c.interfaces_dir,
        c_p.interfaces_dir if c_p != None else None,
        static_library,
        dynamic_library,
        c.exposed_modules_file,
        other_modules,
        my_pkg_id,
        static_library_prof = static_library_prof,
    )

    static_libraries_prof = dep_info.static_libraries_prof
    if static_library_prof != None:
        static_libraries_prof = [static_library_prof] + dep_info.static_libraries_prof

    # Accumulate interface dirs from dependencies plus this target's output.
    interface_dirs = set.union(
        dep_info.interface_dirs,
        set.singleton(c.interfaces_dir),
    )
    if c_p != None:
        interface_dirs = set.mutable_union(
            interface_dirs,
            set.singleton(c_p.interfaces_dir),
        )

    # Only versioned targets get a generated version-macros header.
    version_macros = set.empty()
    if version != None:
        version_macros = set.singleton(
            generate_version_macros(ctx, hs.name, version),
        )

    hs_info = HaskellInfo(
        package_ids = set.insert(dep_info.package_ids, pkg_id.to_string(my_pkg_id)),
        package_databases = set.insert(dep_info.package_databases, cache_file),
        version_macros = version_macros,
        source_files = c.source_files,
        extra_source_files = c.extra_source_files,
        import_dirs = c.import_dirs,
        # NOTE We have to use lists for static libraries because the order is
        # important for linker. Linker searches for unresolved symbols to the
        # left, i.e. you first feed a library which has unresolved symbols and
        # then you feed the library which resolves the symbols.
        static_libraries = [static_library] + dep_info.static_libraries,
        static_libraries_prof = static_libraries_prof,
        dynamic_libraries = dynamic_libraries,
        interface_dirs = interface_dirs,
        compile_flags = c.compile_flags,
        prebuilt_dependencies = dep_info.prebuilt_dependencies,
        cc_dependencies = dep_info.cc_dependencies,
        transitive_cc_dependencies = dep_info.transitive_cc_dependencies,
    )
    lib_info = HaskellLibraryInfo(
        package_id = pkg_id.to_string(my_pkg_id),
        version = version,
    )

    # Propagate coverage data from deps and add this target's own.
    dep_coverage_data = []
    for dep in ctx.attr.deps:
        if HaskellCoverageInfo in dep:
            dep_coverage_data += dep[HaskellCoverageInfo].coverage_data
    coverage_info = HaskellCoverageInfo(
        coverage_data = dep_coverage_data + c.coverage_data,
    )
    target_files = depset([file for file in [static_library, dynamic_library] if file])

    # ctx.outputs is only present when invoked as a full rule (not when
    # reused as a helper) — presumably guards the REPL/runghc outputs.
    if hasattr(ctx, "outputs"):
        build_haskell_repl(
            hs,
            ghci_script = ctx.file._ghci_script,
            ghci_repl_wrapper = ctx.file._ghci_repl_wrapper,
            repl_ghci_args = ctx.attr.repl_ghci_args,
            user_compile_flags = ctx.attr.compiler_flags,
            output = ctx.outputs.repl,
            package_databases = dep_info.package_databases,
            version = ctx.attr.version,
            hs_info = hs_info,
            lib_info = lib_info,
        )

        # XXX Temporary backwards compatibility hack. Remove eventually.
        # See https://github.com/tweag/rules_haskell/pull/460.
        ln(hs, ctx.outputs.repl, ctx.outputs.repl_deprecated)

        build_haskell_runghc(
            hs,
            runghc_wrapper = ctx.file._ghci_repl_wrapper,
            extra_args = ctx.attr.runcompile_flags,
            user_compile_flags = ctx.attr.compiler_flags,
            output = ctx.outputs.runghc,
            package_databases = dep_info.package_databases,
            version = ctx.attr.version,
            hs_info = hs_info,
            lib_info = lib_info,
        )

    default_info = None
    if hasattr(ctx, "runfiles"):
        default_info = DefaultInfo(
            files = target_files,
            runfiles = ctx.runfiles(collect_data = True),
        )
    else:
        default_info = DefaultInfo(
            files = target_files,
        )

    # Create a CcInfo provider so that CC rules can work with
    # a haskell library as if it was a regular CC one.

    # XXX Workaround https://github.com/bazelbuild/bazel/issues/6874.
    # Should be find_cpp_toolchain() instead.
    cc_toolchain = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
    feature_configuration = cc_common.configure_features(
        cc_toolchain = cc_toolchain,
        requested_features = ctx.features,
        unsupported_features = ctx.disabled_features,
    )
    library_to_link = cc_common.create_library_to_link(
        actions = ctx.actions,
        feature_configuration = feature_configuration,
        dynamic_library = dynamic_library,
        static_library = static_library,
        cc_toolchain = cc_toolchain,
    )
    compilation_context = cc_common.create_compilation_context()
    linking_context = cc_common.create_linking_context(
        libraries_to_link = [library_to_link],
    )
    cc_info = cc_common.merge_cc_infos(
        cc_infos = [
            CcInfo(
                compilation_context = compilation_context,
                linking_context = linking_context,
            ),
        ] + [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep],
    )
    return [
        hs_info,
        cc_info,
        coverage_info,
        default_info,
        lib_info,
    ]
def haskell_toolchain_library_impl(ctx):
    """Expose a package from GHC's global package database as a Bazel target.

    Queries ghc-pkg for the package's id and generates a header file with
    Cabal-style version macros for it.

    Args:
      ctx: Rule context.

    Returns:
      [HaskellPrebuiltPackageInfo]
    """
    hs = haskell_context(ctx)

    # Default to the rule's own name when no explicit package is given.
    package = ctx.attr.package or ctx.label.name

    # Ask ghc-pkg for the package id and capture it in a file.
    id_file = hs.actions.declare_file(target_unique_name(hs, "id"))
    hs.actions.run_shell(
        inputs = [hs.tools.ghc_pkg],
        outputs = [id_file],
        command = """
        "$1" --simple-output -v1 field "$2" id > "$3"
        """,
        arguments = [
            hs.tools.ghc_pkg.path,
            package,
            id_file.path,
        ],
    )

    # Generate version macros from the package's registered name and version.
    version_macros_file = hs.actions.declare_file("{}_version_macros.h".format(hs.name))
    hs.actions.run_shell(
        inputs = [hs.tools.ghc_pkg, ctx.executable._version_macros],
        outputs = [version_macros_file],
        command = """
        "$1" \\
            `"$2" --simple-output -v1 field "$3" name` \\
            `"$2" --simple-output -v1 field "$3" version` \\
            > "$4"
        """,
        arguments = [
            ctx.executable._version_macros.path,
            hs.tools.ghc_pkg.path,
            package,
            version_macros_file.path,
        ],
    )

    return [HaskellPrebuiltPackageInfo(
        package = package,
        id_file = id_file,
        version_macros_file = version_macros_file,
    )]
def _exposed_modules_reexports(exports):
    """Creates a ghc-pkg-compatible list of reexport declarations.

    A ghc-pkg registration file declares reexports as part of the
    exposed-modules field in the following format:

    exposed-modules: A, B, C from pkg-c:C, D from pkg-d:Original.D

    Here, the Original.D module from pkg-d is renamed by virtue of a
    different name being used before the "from" keyword.

    This function creates a ghc-pkg-compatible list of reexport declarations
    (as shown above) from a dictionary mapping package targets to "Cabal-style"
    reexported-modules declarations. That is, something like:

    {
        ":pkg-c": "C",
        ":pkg-d": "Original.D as D",
        ":pkg-e": "E1, Original.E2 as E2",
    }

    Args:
      exports: a dictionary mapping package targets to "Cabal-style"
               reexported-modules declarations.

    Returns:
      a ghc-pkg-compatible list of reexport declarations.
    """
    exposed_reexports = []
    for dep, cabal_decls in exports.items():
        for cabal_decl in cabal_decls.split(","):
            stripped_cabal_decl = cabal_decl.strip()

            # "Original.D as D" reexports Original.D under the name D;
            # without " as " the module keeps its original name.
            cabal_decl_parts = stripped_cabal_decl.split(" as ")
            original = cabal_decl_parts[0]
            if len(cabal_decl_parts) == 2:
                reexported = cabal_decl_parts[1]
            else:
                reexported = cabal_decl_parts[0]

            # ghc-pkg wants the package (id) on the right-hand side of "from".
            if HaskellPrebuiltPackageInfo in dep:
                pkg = dep[HaskellPrebuiltPackageInfo].package
            elif HaskellLibraryInfo in dep:
                pkg = dep[HaskellLibraryInfo].package_id
            else:
                # Previously this fell through with `pkg` unbound, producing a
                # confusing evaluation error. Fail with a clear message instead.
                fail("exports entry '{}' is neither a haskell_library nor a prebuilt Haskell package".format(dep))
            exposed_reexport = "{reexported} from {pkg}:{original}".format(
                reexported = reexported,
                pkg = pkg,
                original = original,
            )
            exposed_reexports.append(exposed_reexport)
    return exposed_reexports

View file

@ -0,0 +1,48 @@
"""Interop with Java."""
load("@bazel_skylib//lib:collections.bzl", "collections")
# Provider carrying the Java artifacts and environment variables that
# Haskell build actions need when Java dependencies are present.
JavaInteropInfo = provider(
    doc = "Information needed for interop with Java rules.",
    fields = {
        "inputs": "Files needed during build.",
        "env": "Dict with env variables that should be set during build.",
    },
)
def java_interop_info(ctx):
    """Gather information from any Java dependencies.

    Args:
      ctx: Rule context.

    Returns:
      JavaInteropInfo: Information needed for Java interop.
    """
    inputs = depset(
        transitive = [
            # We only expose direct dependencies, though we could
            # expose transitive ones as well. Only exposing the direct
            # ones corresponds to Bazel's "strict Java dependencies"
            # mode. See
            # https://github.com/tweag/rules_haskell/issues/96.
            dep[JavaInfo].compile_jars
            for dep in ctx.attr.deps
            if JavaInfo in dep
        ],
    )
    env_dict = dict()

    # Use to_list() instead of iterating the depset directly: direct
    # iteration over depsets is deprecated and fails under
    # --incompatible_depset_is_not_iterable.
    uniq_classpath = collections.uniq([
        f.path
        for f in inputs.to_list()
    ])
    if uniq_classpath:
        env_dict["CLASSPATH"] = ":".join(uniq_classpath)
    return JavaInteropInfo(
        inputs = inputs,
        env = env_dict,
    )

View file

@ -0,0 +1,26 @@
"""Helper functions on lists."""
load(":private/set.bzl", "set")
def _dedup_on(f, list_):
    """deduplicate `list_` by comparing the result of applying
    f to each element (e.g. comparing sub fields)

    def compare_x(el):
        return el.x

    dedup_on(compare_x, [struct(x=3), struct(x=4), struct(x=3)])
    => [struct(x=3), struct(x=4)]

    Args:
      f: Key function; elements whose keys compare equal are duplicates.
      list_: The list to deduplicate. The first occurrence wins.

    Returns:
      A new list with later duplicates removed, original order preserved.
    """
    seen = set.empty()
    deduped = []
    for el in list_:
        by = f(el)
        # Only keep the first element seen for any given key.
        if not set.is_member(seen, by):
            set.mutable_insert(seen, by)
            deduped.append(el)
    return deduped
# Public namespace for list helpers, following the skylib module pattern.
list = struct(
    dedup_on = _dedup_on,
)

View file

@ -0,0 +1,109 @@
#!/usr/bin/env python
#
# Create a list of exposed modules (including reexported modules)
# given a directory full of interface files and the content of the
# global package database (to mine the versions of all prebuilt
# dependencies). The exposed modules are filtered using a provided
# list of hidden modules, and augmented with reexport declarations.
from __future__ import unicode_literals, print_function
import collections
import fnmatch
import itertools
import os
import re
import sys
import io
# Validate and unpack the five positional command-line arguments.
if len(sys.argv) != 6:
    sys.exit("Usage: %s <DIRECTORY> <GLOBAL_PKG_DB> <HIDDEN_MODS_FILE> <REEXPORTED_MODS_FILE> <RESULT_FILE>" % sys.argv[0])

root = sys.argv[1]
global_pkg_db_dump = sys.argv[2]
hidden_modules_file = sys.argv[3]
reexported_modules_file = sys.argv[4]
results_file = sys.argv[5]

# Scan the global package db dump twice: once for package names, once for
# package ids (seek(0) rewinds between the passes).
with io.open(global_pkg_db_dump, "r", encoding='utf8') as f:
    names = [line.split()[1] for line in f if line.startswith("name:")]
    f.seek(0)
    ids = [line.split()[1] for line in f if line.startswith("id:")]

# A few sanity checks.
assert len(names) == len(ids)

# compute duplicate, i.e. package name associated with multiples ids
duplicates = set()
if len(names) != len(set(names)):
    duplicates = set([
        name for name, count in collections.Counter(names).items()
        if count > 1
    ])

# This associate pkg name to pkg id
pkg_ids_map = dict(zip(names, ids))

# Hidden modules come as a single comma-separated line.
with io.open(hidden_modules_file, "r", encoding='utf8') as f:
    hidden_modules = [mod.strip() for mod in f.read().split(",")]

# Note: the generator's first iterable (f.read().split(",")) is evaluated
# eagerly when the genexpr is created, i.e. while the file is still open.
with io.open(reexported_modules_file, "r", encoding='utf8') as f:
    raw_reexported_modules = (
        mod.strip() for mod in f.read().split(",") if mod.strip()
    )

# Substitute package ids for package names in reexports, because
# GHC really wants package ids.
regexp = re.compile("from (%s):" % "|".join(map(re.escape, pkg_ids_map)))

def replace_pkg_by_pkgid(match):
    """re.sub callback: rewrite 'from <name>:' to 'from <id>:'.

    Aborts when the package name maps to more than one installed id,
    since the substitution would be ambiguous.
    """
    pkgname = match.group(1)
    if pkgname in duplicates:
        sys.exit(
            "\n".join([
                "Multiple versions of the following packages installed: ",
                ", ".join(duplicates),
                "\nThe following was explictly used: " + pkgname,
                "\nThis is not currently supported.",
            ])
        )
    return "from %s:" % pkg_ids_map[pkgname]

reexported_modules = (
    regexp.sub(replace_pkg_by_pkgid, mod)
    for mod in raw_reexported_modules
)
def handle_walk_error(e):
    """Abort with a helpful message when interface files cannot be listed.

    Passed to os.walk as the onerror callback; receives the OSError raised
    while walking the interface-file directory.
    """
    # Raw string: the example registry path contains backslash sequences
    # (\S, \C, \F) that are invalid escapes in a normal string literal and
    # raise warnings (eventually errors) on newer Python versions. The
    # printed bytes are unchanged.
    print(r"""
Failed to list interface files:
{}
On Windows you may need to enable long file path support:
Set-ItemProperty -Path 'HKLM:\SYSTEM\CurrentControlSet\Control\FileSystem' -Name 'LongPathsEnabled' -Value 1
""".strip().format(e), file=sys.stderr)
    # sys.exit instead of the site-provided exit() builtin, which is not
    # guaranteed to exist (e.g. under python -S).
    sys.exit(1)
# Lazily enumerate every .hi interface file below the output directory.
interface_files = (
    os.path.join(path, f)
    for path, dirs, files in os.walk(root, onerror=handle_walk_error)
    for f in fnmatch.filter(files, '*.hi')
)

modules = (
    # replace directory separators by . to generate module names
    # / and \ are respectively the separators for unix (linux / darwin) and windows systems
    os.path.splitext(os.path.relpath(f, start=root))[0]
    .replace("/",".")
    .replace("\\",".")
    for f in interface_files
)

# Drop modules the target declared as hidden.
exposed_modules = (
    m
    for m in modules
    if m not in hidden_modules
)

# Write the final ghc-pkg exposed-modules value: exposed modules followed
# by the reexport declarations, comma-separated.
with io.open(results_file, "w", encoding='utf8') as f:
    f.write(", ".join(itertools.chain(exposed_modules, reexported_modules)))

View file

@ -0,0 +1,12 @@
"""Compilation modes."""
def is_profiling_enabled(hs):
    """Check whether profiling mode is enabled.

    Args:
      hs: Haskell context.

    Returns:
      bool: True if the mode is enabled, False otherwise.
    """
    # Profiling builds piggy-back on Bazel's debug compilation mode (-c dbg).
    profiling_mode = "dbg"
    return hs.mode == profiling_mode

View file

@ -0,0 +1,313 @@
#!/bin/bash
#
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a wrapper script around gcc/clang that adjusts linker flags for
# Haskell library and binary targets.
#
# Load commands that attempt to load dynamic libraries relative to the working
# directory in their package output path (bazel-out/...) are converted to load
# commands relative to @rpath. rules_haskell passes the corresponding
# -Wl,-rpath,... flags itself.
#
# rpath commands that attempt to add rpaths relative to the working directory
# to look for libraries in their package output path (bazel-out/...) are
# omitted, since rules_haskell adds more appropriate rpaths itself.
#
# GHC generates intermediate dynamic libraries outside the build tree.
# Additional RPATH entries are provided for those to make dynamic library
# dependencies in the Bazel build tree available at runtime.
#
# See https://blogs.oracle.com/dipol/entry/dynamic_libraries_rpath_and_mac
# on how to set those paths for Mach-O binaries.
#
set -euo pipefail

INSTALL_NAME_TOOL="/usr/bin/install_name_tool"
OTOOL="/usr/bin/otool"

# Collect arguments to forward in a fresh response file.
RESPONSE_FILE="$(mktemp osx_cc_args_XXXX.rsp)"

# Clean the response file up however the script exits.
rm_response_file() {
    rm -f "$RESPONSE_FILE"
}
trap rm_response_file EXIT

add_args() {
    # Add the given arguments to the fresh response file. We follow GHC's
    # example in storing one argument per line, wrapped in double quotes. Double
    # quotes in the argument itself are escaped.
    for arg in "$@"; do
        printf '"%s"\n' "${arg//\"/\\\"}" >> "$RESPONSE_FILE"
    done
}

# Collect library, library dir, and rpath arguments.
LIBS=()
LIB_DIRS=()
RPATHS=()

# Parser state. Each flag is "1" when the corresponding argument kind is
# expected next, empty otherwise.
# Parsing response file - unquote arguments.
QUOTES=
# Upcoming linker argument.
LINKER=
# Upcoming rpath argument.
RPATH=
# Upcoming install-name argument.
INSTALL=
# Upcoming output argument. After parsing, holds the -o output path.
OUTPUT=
parse_arg() {
    # Parse the given argument. Decide whether to pass it on to the compiler,
    # and how it affects the parser state.
    local arg="$1"

    # Unquote response file arguments.
    if [[ "$QUOTES" = "1" && "$arg" =~ ^\"(.*)\"$ ]]; then
        # Take GHC's argument quoting into account when parsing a response
        # file. Note, no indication was found that GHC would pass multiline
        # arguments, or insert escape codes into the quoted arguments. If you
        # observe ill-formed arguments being passed to the compiler, then this
        # logic may need to be extended.
        arg="${BASH_REMATCH[1]}"
    fi

    # Parse given argument.
    if [[ "$OUTPUT" = "1" ]]; then
        # The previous argument was -o. Read output file.
        OUTPUT="$arg"
        add_args "$arg"
    elif [[ "$LINKER" = "1" ]]; then
        # The previous argument was -Xlinker. Read linker argument.
        if [[ "$RPATH" = "1" ]]; then
            # The previous argument was -rpath. Read RPATH.
            parse_rpath "$arg"
            RPATH=0
        elif [[ "$arg" = "-rpath" ]]; then
            # rpath is coming
            RPATH=1
        else
            # Unrecognized linker argument. Pass it on.
            add_args "-Xlinker" "$arg"
        fi
        LINKER=
    elif [[ "$INSTALL" = "1" ]]; then
        # The previous argument was -install_name; forward its value verbatim.
        INSTALL=
        add_args "$arg"
    elif [[ "$arg" =~ ^@(.*)$ ]]; then
        # Handle response file argument. Parse the arguments contained in the
        # response file one by one. Take GHC's argument quoting into account.
        # Note, assumes that response file arguments are not nested in other
        # response files.
        QUOTES=1
        while read line; do
            parse_arg "$line"
        done < "${BASH_REMATCH[1]}"
        QUOTES=
    elif [[ "$arg" = "-install_name" ]]; then
        # Install name is coming. We don't use it, but it can start with an @
        # and be mistaken for a response file.
        INSTALL=1
        add_args "$arg"
    elif [[ "$arg" = "-o" ]]; then
        # output is coming
        OUTPUT=1
        add_args "$arg"
    elif [[ "$arg" = "-Xlinker" ]]; then
        # linker flag is coming
        LINKER=1
    elif [[ "$arg" =~ ^-l(.*)$ ]]; then
        # Record the library name for later rpath/install-name fixups.
        LIBS+=("${BASH_REMATCH[1]}")
        add_args "$arg"
    elif [[ "$arg" =~ ^-L(.*)$ ]]; then
        # Record the library search directory.
        LIB_DIRS+=("${BASH_REMATCH[1]}")
        add_args "$arg"
    elif [[ "$arg" =~ ^-Wl,-rpath,(.*)$ ]]; then
        # Compiler-driver style rpath; filter it through parse_rpath.
        parse_rpath "${BASH_REMATCH[1]}"
    else
        # Unrecognized argument. Pass it on.
        add_args "$arg"
    fi
}
parse_rpath() {
    # Parse the given -rpath argument and decide whether it should be
    # forwarded to the compiler/linker.
    local rpath="$1"
    if [[ "$rpath" =~ ^/ || "$rpath" =~ ^@ ]]; then
        # Absolute rpaths or rpaths relative to @loader_path or similar, are
        # passed on to the linker. Other relative rpaths are dropped, these
        # are auto-generated by GHC, but are useless because rules_haskell
        # constructs dedicated rpaths to the _solib or _hssolib directory.
        # See https://github.com/tweag/rules_haskell/issues/689
        add_args "-Wl,-rpath,$rpath"
        RPATHS+=("$rpath")
    fi
}

# Parse all given arguments.
for arg in "$@"; do
    parse_arg "$arg"
done
get_library_in() {
    # Find the given library in the given directory.
    # Returns empty string if the library is not found.
    local lib="$1"
    local dir="$2"
    # Check both Linux-style (.so) and macOS-style (.dylib) file names.
    local solib="${dir}${dir:+/}lib${lib}.so"
    local dylib="${dir}${dir:+/}lib${lib}.dylib"
    if [[ -f "$solib" ]]; then
        echo "$solib"
    elif [[ -f "$dylib" ]]; then
        echo "$dylib"
    fi
}

get_library_path() {
    # Find the given library in the specified library search paths.
    # Returns empty string if the library is not found.
    if [[ ${#LIB_DIRS[@]} -gt 0 ]]; then
        local libpath
        for libdir in "${LIB_DIRS[@]}"; do
            libpath="$(get_library_in "$1" "$libdir")"
            if [[ -n "$libpath" ]]; then
                echo "$libpath"
                return
            fi
        done
    fi
}
resolve_rpath() {
    # Resolve the given rpath. I.e. if it is an absolute path, just return it.
    # If it is relative to the output, then prepend the output path.
    local rpath="$1"
    if [[ "$rpath" =~ ^/ ]]; then
        echo "$rpath"
    elif [[ "$rpath" =~ ^@loader_path/(.*)$ || "$rpath" =~ ^@executable_path/(.*)$ ]]; then
        # NOTE(review): both @loader_path and @executable_path are resolved
        # relative to the directory of the linker OUTPUT here — confirm this
        # matches how the produced binary is loaded at runtime.
        echo "$(dirname "$OUTPUT")/${BASH_REMATCH[1]}"
    else
        echo "$rpath"
    fi
}

get_library_rpath() {
    # Find the given library in the specified rpaths.
    # Returns empty string if the library is not found.
    if [[ ${#RPATHS[@]} -gt 0 ]]; then
        local libdir libpath
        for rpath in "${RPATHS[@]}"; do
            libdir="$(resolve_rpath "$rpath")"
            libpath="$(get_library_in "$1" "$libdir")"
            if [[ -n "$libpath" ]]; then
                echo "$libpath"
                return
            fi
        done
    fi
}

get_library_name() {
    # Get the "library name" (install name) of the given library,
    # i.e. the last line of `otool -D`.
    "$OTOOL" -D "$1" | tail -1
}
relpath() {
    # Find relative path from the first to the second path. Assuming the first
    # is a directory. If either is an absolute path, then we return the
    # absolute path to the second.
    local from="$1"
    local to="$2"
    if [[ "$to" =~ ^/ ]]; then
        echo "$to"
    elif [[ "$from" =~ ^/ ]]; then
        echo "$PWD/$to"
    else
        # Split path and store components in bash array.
        IFS=/ read -a fromarr <<<"$from"
        IFS=/ read -a toarr <<<"$to"
        # Drop common prefix. Use a default expansion for toarr: when "to"
        # has fewer components than "from", the unset array element would
        # otherwise abort the script under `set -u`.
        for ((i=0; i < ${#fromarr[@]}; ++i)); do
            if [[ "${fromarr[$i]}" != "${toarr[$i]:-}" ]]; then
                break
            fi
        done
        # Construct relative path: one ".." per remaining "from" component,
        # then the remaining "to" components.
        local common=$i
        local out=
        for ((i=$common; i < ${#fromarr[@]}; ++i)); do
            out="$out${out:+/}.."
        done
        for ((i=$common; i < ${#toarr[@]}; ++i)); do
            out="$out${out:+/}${toarr[$i]}"
        done
        # Quote the result to prevent word splitting and glob expansion.
        echo "$out"
    fi
}
generate_rpath() {
    # Generate an rpath entry for the given library path.
    local rpath="$(relpath "$(dirname "$OUTPUT")" "$(dirname "$1")")"
    if [[ "$rpath" =~ ^/ ]]; then
        echo "$rpath"
    else
        # Relative rpaths are relative to the binary.
        echo "@loader_path${rpath:+/}$rpath"
    fi
}

if [[ ! "$OUTPUT" =~ ^bazel-out/ && ${#LIBS[@]} -gt 0 ]]; then
    # GHC generates temporary dynamic libraries during compilation outside of
    # the build directory. References to dynamic C libraries are broken in this
    # case. Here we add additional RPATHs to fix these references. The Hazel
    # package for swagger2 is an example that triggers this issue.
    for lib in "${LIBS[@]}"; do
        librpath="$(get_library_rpath "$lib")"
        if [[ -z "$librpath" ]]; then
            # The given library was not found in any of the rpaths.
            # Find it in the library search paths.
            libpath="$(get_library_path "$lib")"
            if [[ "$libpath" =~ ^bazel-out/ ]]; then
                # The library is Bazel generated and loaded relative to PWD.
                # Add an RPATH entry, so it is found at runtime.
                rpath="$(generate_rpath "$libpath")"
                parse_rpath "$rpath"
            fi
        fi
    done
fi

# Call the C++ compiler with the fresh response file.
# %{cc} is substituted with the real compiler by the template expansion.
%{cc} "@$RESPONSE_FILE"

if [[ ${#LIBS[@]} -gt 0 ]]; then
    # Replace load commands relative to the working directory, by load commands
    # relative to the rpath, if the library can be found relative to an rpath.
    for lib in "${LIBS[@]}"; do
        librpath="$(get_library_rpath "$lib")"
        if [[ -n "$librpath" ]]; then
            libname="$(get_library_name "$librpath")"
            if [[ "$libname" =~ ^bazel-out/ ]]; then
                "${INSTALL_NAME_TOOL}" -change \
                    "$libname" \
                    "@rpath/$(basename "$librpath")" \
                    "$OUTPUT"
            fi
        fi
    done
fi

# vim: ft=sh

View file

@ -0,0 +1,94 @@
"""Package list handling"""
load(":private/set.bzl", "set")
def pkg_info_to_compile_flags(pkg_info, for_plugin = False):
    """Map package info to GHC command-line arguments.

    Args:
      pkg_info: Package info collected by `ghc_info()`.
      for_plugin: Whether the package is a plugin dependency.

    Returns:
      The list of command-line arguments that should be passed to GHC.
    """
    namespace = "plugin-" if for_plugin else ""

    # In compile.bzl, this is passed just before all -package-id
    # arguments. Not doing so leads to bizarre compile-time failures.
    # It turns out that equally, not doing so leads to bizarre
    # link-time failures. See
    # https://github.com/tweag/rules_haskell/issues/395.
    ghc_args = ["-hide-all-{}packages".format(namespace)]

    if not pkg_info.has_version:
        # Macro versions are disabled for all packages by default
        # and enabled for packages with a version, see
        # https://github.com/tweag/rules_haskell/issues/414
        ghc_args.append("-fno-version-macros")

    for pkg_name in pkg_info.packages:
        ghc_args += ["-{}package".format(namespace), pkg_name]
    for pkg_id in pkg_info.package_ids:
        ghc_args += ["-{}package-id".format(namespace), pkg_id]

    # Note: -package-db has no plugin- namespaced variant.
    for pkg_db in pkg_info.package_dbs:
        ghc_args += ["-package-db", pkg_db]
    return ghc_args
def expose_packages(hs_info, lib_info, use_direct, use_my_pkg_id, custom_package_databases, version):
    """Collect the information GHC needs in order to enable Haskell packages.

    hs_info: is common to all builds
    version: if the rule contains a version, we will export the CPP version macro

    All the other arguments are not understood well:

    lib_info: only used for repl and linter
    use_direct: only used for repl and linter
    use_my_pkg_id: only used for one specific task in compile.bzl
    custom_package_databases: override the package_databases of hs_info, used only by the repl
    """

    # Expose all prebuilt dependencies.
    #
    # We have to remember to specify all (transitive) wired-in
    # dependencies or we can't find objects for linking.
    #
    # Set use_direct if hs_info does not have a direct_prebuilt_deps field.
    prebuilt_deps = hs_info.direct_prebuilt_deps if use_direct else hs_info.prebuilt_dependencies
    packages = [dep.package for dep in set.to_list(prebuilt_deps)]

    # Expose all bazel dependencies, except the target's own package id
    # (lib_info is only set by the repl and linter; use_my_pkg_id only by
    # compile.bzl — their exact meaning is not fully understood, see above).
    package_ids = [
        pkg
        for pkg in set.to_list(hs_info.package_ids)
        if (lib_info == None or pkg != lib_info.package_id) and
           (use_my_pkg_id == None or pkg != use_my_pkg_id)
    ]

    # Only include package DBs for deps, prebuilt deps should be found
    # auto-magically by GHC.
    caches = custom_package_databases if custom_package_databases else hs_info.package_databases
    package_dbs = [cache.dirname for cache in set.to_list(caches)]

    return struct(
        has_version = version != None and version != "",
        packages = packages,
        package_ids = package_ids,
        package_dbs = package_dbs,
    )

View file

@ -0,0 +1,471 @@
"""Utilities for module and path manipulations."""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":private/set.bzl", "set")
def module_name(hs, f, rel_path = None):
    """Given Haskell source file path, turn it into a dot-separated module name.

    module_name(
        hs,
        "some-workspace/some-package/src/Foo/Bar/Baz.hs",
    ) => "Foo.Bar.Baz"

    Args:
      hs: Haskell context.
      f: Haskell source file.
      rel_path: Explicit relative path from import root to the module, or None
        if it should be deduced.

    Returns:
      string: Haskell module name.
    """
    path_to_module = rel_path if rel_path else _rel_path_to_module(hs, f)

    # Turn path separators into dots and strip the file extension.
    (module, _ext) = paths.split_extension(path_to_module.replace("/", "."))
    return module
def target_unique_name(hs, name_prefix):
    """Make a target-unique name.

    `name_prefix` is made target-unique by adding a rule name
    suffix to it. This means that given two different rules, the same
    `name_prefix` is distinct. Note that this does not disambiguate two
    names within the same rule. Given a haskell_library with name foo
    you could expect:

    target_unique_name(hs, "libdir") => "libdir-foo"

    This allows two rules using same name_prefix being built in same
    environment to avoid name clashes of their output files and directories.

    Args:
      hs: Haskell context.
      name_prefix: Template for the name.

    Returns:
      string: Target-unique name_prefix.
    """
    return "%s-%s" % (name_prefix, hs.name)
def module_unique_name(hs, source_file, name_prefix):
    """Make a target- and module-unique name.

    module_unique_name(
        hs,
        "some-workspace/some-package/src/Foo/Bar/Baz.hs",
        "libdir"
    ) => "libdir-foo-Foo.Bar.Baz"

    This is quite similar to `target_unique_name` but also uses a path built
    from `source_file` to prevent clashes with other names produced using the
    same `name_prefix`.

    Args:
      hs: Haskell context.
      source_file: Source file name.
      name_prefix: Template for the name.

    Returns:
      string: Target- and source-unique name.
    """
    return "%s-%s" % (
        target_unique_name(hs, name_prefix),
        module_name(hs, source_file),
    )
def declare_compiled(hs, src, ext, directory = None, rel_path = None):
    """Given a Haskell-ish source file, declare its output.

    Args:
      hs: Haskell context.
      src: Haskell source file.
      ext: New extension.
      directory: String, directory prefix the new file should live in.
      rel_path: Explicit relative path from import root to the module, or None
        if it should be deduced.

    Returns:
      File: Declared output file living in `directory` with given `ext`.
    """
    module_path = rel_path if rel_path else _rel_path_to_module(hs, src)

    # Swap the source extension for the requested one, then prefix with
    # `directory` when given.
    out_path = paths.replace_extension(module_path, ext)
    if directory != None:
        out_path = paths.join(directory, out_path)
    return hs.actions.declare_file(out_path)
def make_path(libs, prefix = None, sep = None):
    """Return a string value for using as LD_LIBRARY_PATH or similar.

    Args:
      libs: List of library files that should be available
      prefix: String, an optional prefix to add to every path.
      sep: String, the path separator, defaults to ":".

    Returns:
      String: paths to the given library directories separated by ":".
    """
    separator = sep if sep else ":"

    # Collect the (deduplicated) set of directories containing the libraries.
    lib_dirs = set.empty()
    for lib in libs:
        directory = paths.dirname(lib.path)
        if prefix:
            directory = paths.join(prefix, directory)
        set.mutable_insert(lib_dirs, directory)
    return separator.join(set.to_list(lib_dirs))
def darwin_convert_to_dylibs(hs, libs):
    """Convert .so dynamic libraries to .dylib.

    Bazel's cc_library rule will create .so files for dynamic libraries even
    on MacOS. GHC's builtin linker, which is used during compilation, GHCi,
    or doctests, hard-codes the assumption that all dynamic libraries on MacOS
    end on .dylib. This function serves as an adaptor and produces symlinks
    from a .dylib version to the .so version for every dynamic library
    dependency that does not end on .dylib.

    Args:
      hs: Haskell context.
      libs: List of library files dynamic or static.

    Returns:
      List of library files where all dynamic libraries end on .dylib.
    """
    converted = []
    for lib in libs:
        # Static libraries and libraries already named *.dylib pass through.
        if not (is_shared_library(lib) and lib.extension != "dylib"):
            converted.append(lib)
            continue

        # Symlink lib<name>.dylib -> the original .so file.
        dylib = hs.actions.declare_file(paths.join(
            target_unique_name(hs, "_dylibs"),
            lib.dirname,
            "lib" + get_lib_name(lib) + ".dylib",
        ))
        ln(hs, lib, dylib)
        converted.append(dylib)
    return converted
def windows_convert_to_dlls(hs, libs):
    """Convert .so dynamic libraries to .dll.

    Bazel's cc_library rule will create .so files for dynamic libraries even
    on Windows. GHC's builtin linker, which is used during compilation, GHCi,
    or doctests, hard-codes the assumption that all dynamic libraries on Windows
    end on .dll. This function serves as an adaptor and produces symlinks
    from a .dll version to the .so version for every dynamic library
    dependency that does not end on .dll.

    Args:
      hs: Haskell context.
      libs: List of library files, dynamic or static.

    Returns:
      List of library files where all dynamic libraries end on .dll.
    """
    lib_prefix = "_dlls"
    result = []
    for lib in libs:
        needs_dll_alias = is_shared_library(lib) and lib.extension != "dll"
        if not needs_dll_alias:
            result.append(lib)
            continue

        # Symlink the .so under a .dll name inside a target-unique directory.
        dll = hs.actions.declare_file(paths.join(
            target_unique_name(hs, lib_prefix),
            paths.dirname(lib.short_path),
            "lib" + get_lib_name(lib) + ".dll",
        ))
        ln(hs, lib, dll)
        result.append(dll)
    return result
def get_lib_name(lib):
    """Return name of library by dropping extension and "lib" prefix.

    Args:
      lib: The library File.

    Returns:
      String: name of library.
    """

    # Strip a leading "lib" prefix, if present.
    base = lib.basename
    if base.startswith("lib"):
        base = base[3:]

    # Versioned shared objects like "foo.so.1.2" keep nothing from the first
    # ".so." onwards; otherwise just drop the final extension.
    versioned_at = base.find(".so.")
    if versioned_at != -1:
        return base[:versioned_at]
    return paths.replace_extension(base, "")
def link_libraries(libs_to_link, args):
    """Add linker flags to link against the given libraries.

    Deduplicates by library name, so each `-l` flag is emitted at most once.

    Args:
      libs_to_link: List of library Files.
      args: List to which the `-l<name>` flags are appended in place.

    Returns:
      List of library names that were linked.
    """
    seen_libs = set.empty()
    libraries = []
    for lib in libs_to_link:
        lib_name = get_lib_name(lib)
        if not set.is_member(seen_libs, lib_name):
            set.mutable_insert(seen_libs, lib_name)
            args += ["-l{0}".format(lib_name)]
            libraries.append(lib_name)

    # BUG FIX: the docstring promised the list of linked library names, but
    # the original implementation built `libraries` and never returned it.
    return libraries
def is_shared_library(f):
    """Check if the given File is a shared library.

    Recognizes plain `.so`/`.dylib` extensions as well as versioned shared
    objects such as `libfoo.so.1.2`.

    Args:
      f: The File to check.

    Returns:
      Bool: True if the given file `f` is a shared library, False otherwise.
    """
    if f.extension in ["so", "dylib"]:
        return True
    return ".so." in f.basename
def is_static_library(f):
    """Check if the given File is a static library.

    Args:
      f: The File to check.

    Returns:
      Bool: True if the given file `f` is a static library, False otherwise.
    """
    # Only `.a` archives count as static libraries here.
    return f.extension == "a"
def _rel_path_to_module(hs, f):
    """Make given file name relative to the directory where the module hierarchy
    starts.

    _rel_path_to_module(
        "some-workspace/some-package/src/Foo/Bar/Baz.hs"
    ) => "Foo/Bar/Baz.hs"

    Args:
      hs: Haskell context.
      f:  Haskell source file.

    Returns:
      string: Relative path to module file.
    """

    # Generated files live under the bin or genfiles root; strip whichever
    # prefix applies first so that src_root applies uniformly below.
    path = f.path
    for root in [hs.bin_dir.path, hs.genfiles_dir.path]:
        if path.startswith(root):
            path = paths.relativize(path, root)
            break

    return paths.relativize(path, hs.src_root)
# TODO Consider merging with paths.relativize. See
# https://github.com/bazelbuild/bazel-skylib/pull/44.
def _truly_relativize(target, relative_to):
    """Return a relative path to `target` from `relative_to`.

    Args:
      target: string, path to directory we want to get relative path to.
      relative_to: string, path to directory from which we are starting.

    Returns:
      string: relative path to `target`.
    """
    t_pieces = target.split("/")
    r_pieces = relative_to.split("/")

    # Count how many leading components the two paths share.
    common = 0
    for tp, rp in zip(t_pieces, r_pieces):
        if tp != rp:
            break
        common += 1

    # Go up once for every remaining component of `relative_to`, then
    # descend into the remainder of `target`.
    ups = [".."] * (len(r_pieces) - common)
    return "/".join(ups + t_pieces[common:])
def ln(hs, target, link, extra_inputs = depset()):
    """Create a symlink to target.

    The link is created with a path relative to the link's own directory,
    so the output tree stays relocatable.

    Args:
      hs: Haskell context.
      target: File, the file the symlink should point at.
      link: File, the declared output file for the symlink itself.
      extra_inputs: extra phony dependencies of symlink.

    Returns:
      None
    """
    relative_target = _truly_relativize(target.path, link.dirname)
    hs.actions.run_shell(
        inputs = depset([target], transitive = [extra_inputs]),
        outputs = [link],
        mnemonic = "Symlink",
        command = "ln -s {target} {link}".format(
            target = relative_target,
            link = link.path,
        ),
        use_default_shell_env = True,
    )
def link_forest(ctx, srcs, basePath = ".", **kwargs):
    """Write a symlink to each file in `srcs` into a destination directory
    defined using the same arguments as `ctx.actions.declare_directory`"""
    links = []
    for src in srcs.to_list():
        # Declare the link next to its siblings under basePath, then point it
        # back at the source file.
        link = ctx.actions.declare_file(
            paths.join(basePath, src.basename),
            **kwargs
        )
        ln(ctx, src, link)
        links.append(link)
    return links
def copy_all(ctx, srcs, dest):
    """Copy all the files in `srcs` into `dest`.

    Args:
      ctx: Standard Bazel Rule context.
      srcs: depset of Files to copy.
      dest: Declared output directory.
    """
    src_list = srcs.to_list()
    if not src_list:
        # No inputs: still create the (empty) output directory, since `dest`
        # was declared and must exist.
        ctx.actions.run_shell(
            command = "mkdir -p {dest}".format(dest = dest.path),
            outputs = [dest],
        )
    else:
        args = ctx.actions.args()
        args.add_all(srcs)
        ctx.actions.run_shell(
            # Pass the depset through directly; wrapping an existing depset
            # in depset() again is deprecated Starlark usage.
            inputs = srcs,
            outputs = [dest],
            mnemonic = "Copy",
            command = "mkdir -p {dest} && cp -L -R \"$@\" {dest}".format(dest = dest.path),
            arguments = [args],
        )
def parse_pattern(ctx, pattern_str):
    """Parses a string label pattern.

    Args:
      ctx: Standard Bazel Rule context.
      pattern_str: The pattern to parse.
        Patterns are absolute labels in the local workspace. E.g.
        `//some/package:some_target`. The following wild-cards are allowed:
        `...`, `:all`, and `:*`. Also the `//some/package` shortcut is allowed.

    Returns:
      A struct of
        package: A list of package path components. May end on the wildcard `...`.
        target: The target name. None if the package ends on `...`. May be one
          of the wildcards `all` or `*`.

    NOTE: it would be better if Bazel itself exposed this functionality to Starlark.

    Any feature using this function should be marked as experimental, until the
    resolution of https://github.com/bazelbuild/bazel/issues/7763.
    """

    # We only load targets in the local workspace anyway. So, it's never
    # necessary to specify a workspace. Therefore, we don't allow it.
    if pattern_str.startswith("@"):
        fail("Invalid haskell_repl pattern. Patterns may not specify a workspace. They only apply to the current workspace")

    # To keep things simple, all patterns have to be absolute.
    if not pattern_str.startswith("//"):
        if not pattern_str.startswith(":"):
            fail("Invalid haskell_repl pattern. Patterns must start with either '//' or ':'.")

        # if the pattern string doesn't start with a package (it starts with :, e.g. :two),
        # then we prepend the contextual package
        pattern_str = "//{package}{target}".format(package = ctx.label.package, target = pattern_str)

    # Separate package and target (if present).
    # NOTE(review): maxsplit = 2 would tolerate a stray second ':'; valid
    # labels contain at most one, so package_target has length 1 or 2 in
    # practice — confirm whether maxsplit = 1 was intended.
    package_target = pattern_str[2:].split(":", maxsplit = 2)
    package_str = package_target[0]
    target_str = None
    if len(package_target) == 2:
        target_str = package_target[1]

    # Parse package pattern.
    package = []
    dotdotdot = False  # ... has to be last component in the pattern.
    for s in package_str.split("/"):
        if dotdotdot:
            fail("Invalid haskell_repl pattern. ... has to appear at the end.")
        if s == "...":
            dotdotdot = True
        package.append(s)

    # Parse target pattern.
    if dotdotdot:
        if target_str != None:
            fail("Invalid haskell_repl pattern. ... has to appear at the end.")
    elif target_str == None:
        # `//some/package` is shorthand for `//some/package:package`.
        if len(package) > 0 and package[-1] != "":
            target_str = package[-1]
        else:
            fail("Invalid haskell_repl pattern. The empty string is not a valid target.")

    return struct(
        package = package,
        target = target_str,
    )
def match_label(patterns, label):
    """Whether the given local workspace label matches the parsed pattern.

    Args:
      patterns: A parsed pattern to match the label against.
        Apply `parse_pattern` before passing a pattern into this function.
      label: Match this label against the pattern.

    Returns:
      A boolean. True if the label is in the local workspace and matches the
      given pattern. False otherwise.

    NOTE: it would be better if Bazel itself exposed this functionality to
    Starlark. Any feature using this function should be marked as
    experimental, until the resolution of
    https://github.com/bazelbuild/bazel/issues/7763.
    """

    # External targets never match: only labels with an empty workspace_root
    # live in the local workspace. (Labels expose workspace_root, not a
    # workspace_name attribute, despite what the docs say.)
    if label.workspace_root != "":
        return False

    label_package = label.package.split("/")

    # Walk package components in lock-step. A `...` wildcard matches
    # everything below this point; any mismatch rules the label out.
    for pattern_comp, label_comp in zip(patterns.package, label_package):
        if pattern_comp == "...":
            return True
        if pattern_comp != label_comp:
            return False

    # No wildcard and no mismatch: both packages must have equal depth,
    # otherwise the label's package is not covered by the pattern.
    if len(patterns.package) != len(label_package):
        return False

    # Finally match the target name, honoring the `all` and `*` wildcards.
    if patterns.target in ["all", "*"]:
        return True
    return patterns.target == label.name

View file

@ -0,0 +1,67 @@
"""Package identifiers"""
load(":private/mode.bzl", "is_profiling_enabled")
load("@bazel_skylib//lib:paths.bzl", "paths")
def _zencode(s):
    """Z-escape special characters to make a valid GHC package identifier.

    The escape character "Z" itself is escaped first, keeping the encoding
    unambiguous.

    Args:
      s: string
    """
    escaped = s.replace("Z", "ZZ")
    escaped = escaped.replace("_", "ZU")
    return escaped.replace("/", "ZS")
def _to_string(my_pkg_id):
    """Get a globally unique package identifier.

    The identifier is required to be unique for each Haskell rule.
    It includes the Bazel package and the name of this component.
    We can't use just the latter because then two components with
    the same names in different packages would clash.
    """
    raw_id = paths.join(
        my_pkg_id.label.workspace_root,
        my_pkg_id.label.package,
        my_pkg_id.name,
    )
    return _zencode(raw_id)
def _new(label, version = None):
    """Create a new package identifier.

    Package identifiers should be globally unique. This is why we use
    a label to identify them.

    Args:
      label: The label of the rule declaring the package.
      version: an optional version annotation.

    Returns:
      struct: the package identifier (label, Cabal-style name, version).
    """

    # Cabal-style package names use dashes rather than underscores.
    package_name = label.name.replace("_", "-")
    return struct(
        label = label,
        name = package_name,
        version = version,
    )
def _library_name(hs, my_pkg_id, prof_suffix = False):
    """Get library name.

    Args:
      hs: Haskell context.
      my_pkg_id: pkg_id struct.
      prof_suffix: whether to automatically add profiling suffix.
    """
    name = "HS" + _to_string(my_pkg_id)

    # Profiling builds get a distinct "_p" library name.
    if prof_suffix and is_profiling_enabled(hs):
        name += "_p"
    return name
# Public namespace for package identifier helpers.
pkg_id = struct(
    new = _new,
    to_string = _to_string,
    library_name = _library_name,
)

View file

@ -0,0 +1,150 @@
"""Immutable sets that support efficient merging, traversal, and membership
check.
"""
def _empty():
    """Create an empty set.

    Returns:
      set, new empty set.
    """
    # Sets are structs wrapping a dict whose keys are the elements.
    return struct(_set_items = {})
def _singleton(e):
    """Create a set with single element `e` inside.

    Args:
      e: The element to put in the set.

    Returns:
      set, new set.
    """
    return struct(_set_items = {e: None})
def _is_member(s, e):
    """Return true if `e` is in the set `s`.

    Args:
      s: The set to inspect.
      e: The element to search for.

    Result:
      Bool, true if `e` is in `s`, false otherwise.
    """
    items = s._set_items
    return e in items
def _insert(s, e):
    """Insert an element into the set, without mutating the original.

    Args:
      s: Set to insert new element into.
      e: The element to insert.

    Result:
      A copy of set `s` with element `e` added.
    """
    items = dict(s._set_items)
    items[e] = None
    return struct(_set_items = items)
def _mutable_insert(s, e):
    """The same as `set.insert`, but modifies the first argument in place.

    Args:
      s: Set to insert new element into.
      e: The element to insert.

    Result:
      set `s` with element `e` added.
    """
    s._set_items[e] = None
    return s
def _union(s0, s1):
    """Return union of two sets, without mutating either.

    Args:
      s0: One set.
      s1: Another set.

    Result:
      set, union of the two sets.
    """
    items = dict(s0._set_items)
    items.update(s1._set_items)
    return struct(_set_items = items)
def _mutable_union(s0, s1):
    """Modify set `s0` adding elements from `s1` to it.

    Args:
      s0: One set. Mutated in place.
      s1: Another set.

    Result:
      set `s0`, now also containing the elements of `s1`.
    """
    s0._set_items.update(s1._set_items)
    return s0
def _map(s, f):
    """Map elements of given set using a function.

    Args:
      s: Original set.
      f: Function to apply to elements of the set.

    Result:
      set with elements obtained by application of function `f` to the
      elements of `s`.
    """
    mapped = {}
    for x in s._set_items.keys():
        mapped[f(x)] = None
    return struct(_set_items = mapped)
def _from_list(l):
    """Create a set containing elements from given list.

    Args:
      l: List, source of the elements for the new set.

    Result:
      set containing elements from given list.
    """
    items = {}
    for x in l:
        items[x] = None
    return struct(_set_items = items)
def _to_list(s):
    """Convert set into a list of its elements.

    Args:
      s: Set to convert.

    Returns:
      List of elements of the set.
    """
    items = s._set_items
    return items.keys()
def _to_depset(s):
    """Similar to `set.to_list`, but produces a depset.

    Args:
      s: Set to convert.

    Returns:
      Depset of elements from the set.
    """
    return depset(s._set_items.keys())
# Public namespace for the immutable-set helpers defined above.
set = struct(
    empty = _empty,
    singleton = _singleton,
    is_member = _is_member,
    insert = _insert,
    mutable_insert = _mutable_insert,
    union = _union,
    mutable_union = _mutable_union,
    map = _map,
    from_list = _from_list,
    to_list = _to_list,
    to_depset = _to_depset,
)

View file

@ -0,0 +1,47 @@
load(":private/set.bzl", "set")
def generate_version_macros(ctx, name, version):
    """Generate a version macros header file.

    Runs the _version_macros script over (name, version) and captures its
    stdout into a declared header file.

    Args:
      ctx: Rule context. Needs to define a _version_macros executable attribute.
      name: The package name.
      version: The package version.

    Returns:
      Version macros header File.
    """
    header = ctx.actions.declare_file("{}_version_macros.h".format(name))
    ctx.actions.run_shell(
        inputs = [ctx.executable._version_macros],
        outputs = [header],
        command = """
        "$1" "$2" "$3" > "$4"
        """,
        arguments = [
            ctx.executable._version_macros.path,
            name,
            version,
            header.path,
        ],
    )
    return header
def version_macro_includes(hs_info):
    """Generate a list of version macro header includes.

    Args:
      hs_info: HaskellInfo provider.

    Returns:
      (files, flags):
      files: Set of version macros header files.
      flags: List of C preprocessor flags to include version macros.
    """
    files = hs_info.version_macros

    # Each header contributes a "-include <path>" flag pair.
    flags = []
    for include in set.to_list(files):
        flags.append("-include")
        flags.append(include.path)
    return (files, flags)

View file

@ -0,0 +1,101 @@
#!/usr/bin/env python3
"""Generate Cabal version macros.
Generates the content of a C header file for the given library name and version
and prints it to standard output.
"""
import argparse
def main():
    """Parse command-line arguments and print the generated macros."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("name", help="The package name.")
    parser.add_argument("version", help="The package version.")
    args = parser.parse_args()
    output = version_macros(args.name, args.version)
    print(output)
def version_macros(name, version):
    """Generate Cabal version macros.

    Based on Cabal's version macro generation, see [1].

    [1]: http://hackage.haskell.org/package/Cabal-2.4.1.0/docs/src/Distribution.Simple.Build.Macros.html#generatePackageVersionMacros
    """
    (major1, major2, minor) = version_components(version)
    escaped_name = cpp_escape_name(name)

    # #define VERSION_pkg "1.2.3"
    version_macro = cpp_ifndef_define(
        "VERSION_" + escaped_name,
        [],
        '"{}"'.format(version),
    )

    # #define MIN_VERSION_pkg(major1, major2, minor) ...
    lexicographic_check = " \\\n".join([
        "(",
        " (major1) < {} ||".format(major1),
        " (major1) == {} && (major2) < {} ||".format(major1, major2),
        " (major1) == {} && (major2) == {} && (minor) <= {} )".format(
            major1, major2, minor),
    ])
    min_version_macro = cpp_ifndef_define(
        "MIN_VERSION_" + escaped_name,
        ["major1", "major2", "minor"],
        lexicographic_check,
    )

    return "\n".join([version_macro, min_version_macro])
def version_components(version):
    """Split version string into major1.major2.minor components.

    Missing components default to "0". Raises ValueError for a version
    string with no leading component (e.g. "" or ".1").

    Args:
        version: Dotted version string, e.g. "1.2.3".

    Returns:
        Tuple of strings (major1, major2, minor).
    """
    components = version.split(".")

    # BUG FIX: str.split always returns at least one element, so the original
    # `len(components) < 1` guard was unreachable and an empty version string
    # silently produced ("", "0", "0"). Validate the first component instead.
    if not components[0]:
        raise ValueError("version should have at least one component.")

    major1 = components[0]
    major2 = components[1] if len(components) >= 2 else "0"
    minor = components[2] if len(components) >= 3 else "0"
    return (major1, major2, minor)
def cpp_escape_name(name):
    """Escape package name to be CPP macro safe."""
    # Dashes are invalid in macro identifiers; map them to underscores.
    return "_".join(name.split("-"))
def cpp_define(macro, params, val):
    """CPP macro definition, optionally with parameters."""
    if params:
        param_list = "({})".format(",".join(params))
    else:
        param_list = ""
    return "#define {macro}{params} {val}".format(
        macro = macro,
        params = param_list,
        val = val,
    )
def cpp_ifndef(macro, body):
    """CPP ifndef block."""
    opening = "#ifndef {macro}".format(macro = macro)
    closing = "#endif /* {macro} */".format(macro = macro)
    return "\n".join([opening, body, closing])
def cpp_ifndef_define(macro, params, val):
    """CPP macro definition, if not previously defined.

    Wraps `cpp_define` in an `#ifndef` guard so externally supplied
    definitions take precedence.
    """
    return cpp_ifndef(macro, cpp_define(macro, params, val))
# Entry point when run as a script.
if __name__ == "__main__":
    main()

View file

@ -0,0 +1,395 @@
"""Support for protocol buffers"""
load(
":private/haskell_impl.bzl",
_haskell_library_impl = "haskell_library_impl",
)
load("@bazel_skylib//lib:paths.bzl", "paths")
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
"HaskellProtobufInfo",
)
def _capitalize_first_letter(c):
    """Capitalize the first letter of the input. Unlike the built-in
    `capitalize()` method, doesn't lower-case the other characters. This helps
    mimic the behavior of `proto-lens-protoc`, which turns `Foo/Bar/BAZ.proto`
    into `Foo/Bar/BAZ.hs` (rather than `Foo/Bar/Baz.hs`).

    Args:
      c: A non-empty string word.

    Returns:
      The input with the first letter upper-cased.
    """
    head = c[0]
    tail = c[1:]
    return head.capitalize() + tail
def _camel_case(comp):
    """Camel-case the input string, preserving any existing capital letters.
    """

    # Split on both "-" and "_", matching the behavior of proto-lens-protoc,
    # dropping empty segments caused by leading/trailing/repeated delimiters.
    words = []
    for underscore_part in comp.split("_"):
        for word in underscore_part.split("-"):
            if word:
                words.append(_capitalize_first_letter(word))
    return "".join(words)
def _proto_lens_output_file(path):
    """The output file from `proto-lens-protoc` when run on the given `path`.
    """
    stem = path[:-len(".proto")]
    camel_cased = "/".join([_camel_case(seg) for seg in stem.split("/")])
    return "Proto/" + camel_cased + ".hs"
def _proto_lens_fields_file(path):
    """The fields file from `proto-lens-protoc` when run on the given `path`.
    """
    stem = path[:-len(".proto")]
    camel_cased = "/".join([_camel_case(seg) for seg in stem.split("/")])
    return "Proto/" + camel_cased + "_Fields.hs"
def _proto_path(proto, proto_source_roots):
    """A path to the proto file which matches any import statements."""
    full_path = proto.path
    for root in proto_source_roots:
        if full_path.startswith(root):
            return paths.relativize(full_path, root)

    # No source root matched: strip the generated-file root plus the owning
    # workspace root instead.
    fallback_root = paths.join(proto.root.path, proto.owner.workspace_root)
    return paths.relativize(full_path, fallback_root)
def _haskell_proto_aspect_impl(target, ctx):
    """Generate and compile a Haskell library from a `proto_library` target.

    Runs `protoc` with the `proto-lens-protoc` plugin over the target's
    `.proto` sources, then compiles the generated modules by invoking
    `haskell_library_impl` on a synthesized ("patched") rule context.

    Args:
      target: The `proto_library` target the aspect is applied to.
      ctx: The aspect context.

    Returns:
      Providers for the generated Haskell library.
    """
    pb = ctx.toolchains["@io_tweag_rules_haskell//protobuf:toolchain"].tools

    args = ctx.actions.args()

    src_prefix = paths.join(
        ctx.label.workspace_root,
        ctx.label.package,
    )

    args.add("--plugin=protoc-gen-haskell=" + pb.plugin.path)

    hs_files = []
    inputs = []

    direct_proto_paths = [target.proto.proto_source_root]
    transitive_proto_paths = target.proto.transitive_proto_path

    args.add_all([
        "-I{0}={1}".format(_proto_path(s, transitive_proto_paths), s.path)
        for s in target.proto.transitive_sources.to_list()
    ])

    inputs.extend(target.proto.transitive_sources.to_list())

    for src in target.proto.direct_sources:
        inputs.append(src)

        # As with the native rules, require the .proto file to be in the same
        # Bazel package as the proto_library rule. This allows us to put the
        # output .hs file next to the input .proto file. Unfortunately Skylark
        # doesn't let us check the package of the file directly, so instead we
        # just look at its short_path and rely on the proto_library rule itself
        # to check for consistency. We use the file's path rather than its
        # dirname/basename in case it's in a subdirectory; for example, if the
        # proto_library rule is in "foo/BUILD" but the .proto file is
        # "foo/bar/baz.proto".
        if not src.path.startswith(paths.join(src.root.path, src_prefix)):
            fail("Mismatch between rule context " + str(ctx.label.package) +
                 " and source file " + src.short_path)
        if src.basename[-6:] != ".proto":
            # BUG FIX: concatenating a File to a string is a Starlark type
            # error, which would mask the intended error message; use str(src).
            fail("bad extension for proto file " + str(src))

        args.add(src.path)
        hs_files.append(ctx.actions.declare_file(
            _proto_lens_output_file(
                _proto_path(src, direct_proto_paths),
            ),
        ))
        hs_files.append(ctx.actions.declare_file(
            _proto_lens_fields_file(
                _proto_path(src, direct_proto_paths),
            ),
        ))

    args.add_all([
        "--proto_path=" + target.proto.proto_source_root,
        "--haskell_out=no-runtime:" + paths.join(
            hs_files[0].root.path,
            src_prefix,
        ),
    ])

    ctx.actions.run(
        inputs = depset([pb.protoc, pb.plugin] + inputs),
        outputs = hs_files,
        mnemonic = "HaskellProtoc",
        executable = pb.protoc,
        arguments = [args],
    )

    # Synthesize the attributes haskell_library_impl expects, as if the
    # generated modules had been written by hand in a haskell_library rule.
    patched_attrs = {
        "compiler_flags": [],
        "src_strip_prefix": "",
        "repl_interpreted": True,
        "repl_ghci_args": [],
        "version": "",
        "linkstatic": False,
        "_ghci_script": ctx.attr._ghci_script,
        "_ghci_repl_wrapper": ctx.attr._ghci_repl_wrapper,
        "hidden_modules": [],
        "exports": {},
        "name": "proto-autogen-" + ctx.rule.attr.name,
        "srcs": hs_files,
        "deps": ctx.rule.attr.deps +
                ctx.toolchains["@io_tweag_rules_haskell//protobuf:toolchain"].deps,
        "prebuilt_dependencies": ctx.toolchains["@io_tweag_rules_haskell//protobuf:toolchain"].prebuilt_deps,
        "plugins": [],
        "_cc_toolchain": ctx.attr._cc_toolchain,
    }
    patched_ctx = struct(
        actions = ctx.actions,
        attr = struct(**patched_attrs),
        bin_dir = ctx.bin_dir,
        disabled_features = ctx.rule.attr.features,
        executable = struct(
            _ls_modules = ctx.executable._ls_modules,
        ),
        # Necessary for CC interop (see cc.bzl).
        features = ctx.rule.attr.features,
        file = ctx.file,
        files = struct(
            srcs = hs_files,
            _cc_toolchain = ctx.files._cc_toolchain,
            extra_srcs = depset(),
        ),
        genfiles_dir = ctx.genfiles_dir,
        label = ctx.label,
        toolchains = ctx.toolchains,
        var = ctx.var,
    )

    # TODO this pattern match is very brittle. Let's not do this. The
    # order should match the order in the return value expression in
    # haskell_library_impl().
    [hs_info, cc_info, coverage_info, default_info, library_info] = _haskell_library_impl(patched_ctx)

    return [
        cc_info,  # CcInfo
        hs_info,  # HaskellInfo
        library_info,  # HaskellLibraryInfo
        # We can't return DefaultInfo here because target already provides that.
        HaskellProtobufInfo(files = default_info.files),
    ]
# Aspect propagated along `deps` of proto_library targets; generates and
# compiles proto-lens Haskell modules for each proto_library it visits.
_haskell_proto_aspect = aspect(
    _haskell_proto_aspect_impl,
    attr_aspects = ["deps"],
    attrs = {
        "_ghci_script": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:assets/ghci_script"),
        ),
        "_ghci_repl_wrapper": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:private/ghci_repl_wrapper.sh"),
        ),
        "_ls_modules": attr.label(
            executable = True,
            cfg = "host",
            default = Label("@io_tweag_rules_haskell//haskell:ls_modules"),
        ),
        "_cc_toolchain": attr.label(
            default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
        ),
    },
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
        "@io_tweag_rules_haskell//protobuf:toolchain",
    ],
)
def _haskell_proto_library_impl(ctx):
    # Forward the providers produced by the aspect on the first dependency.
    # FIXME: only the first entry of `deps` is honored.
    proto_dep = ctx.attr.deps[0]
    return [
        proto_dep[CcInfo],
        proto_dep[HaskellInfo],
        proto_dep[HaskellLibraryInfo],
        DefaultInfo(files = proto_dep[HaskellProtobufInfo].files),
    ]
# Public rule wrapping _haskell_proto_aspect; see the doc string below.
haskell_proto_library = rule(
    _haskell_proto_library_impl,
    attrs = {
        "deps": attr.label_list(
            mandatory = True,
            allow_files = False,
            aspects = [_haskell_proto_aspect],
            doc = "List of `proto_library` targets to use for generation.",
        ),
    },
    toolchains = [
        "@io_tweag_rules_haskell//haskell:toolchain",
        "@io_tweag_rules_haskell//protobuf:toolchain",
    ],
)

"""Generate Haskell library allowing to use protobuf definitions with help
of [`proto-lens`](https://github.com/google/proto-lens#readme).

Example:
  ```bzl
  proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
  )

  haskell_proto_library(
    name = "foo_haskell_proto",
    deps = [":foo_proto"],
  )
  ```

`haskell_proto_library` targets require `haskell_proto_toolchain` to be
registered.
"""
def _protobuf_toolchain_impl(ctx):
    """Expose the protoc compiler, plugin and library deps as a ToolchainInfo."""
    if ctx.attr.prebuilt_deps:
        # Deprecation warning only; the attribute still works below.
        print("""The attribute 'prebuilt_deps' has been deprecated,
use the 'deps' attribute instead.
        """)
    return [
        platform_common.ToolchainInfo(
            name = ctx.label.name,
            tools = struct(
                plugin = ctx.executable.plugin,
                protoc = ctx.executable.protoc,
            ),
            deps = ctx.attr.deps,
            prebuilt_deps = ctx.attr.prebuilt_deps,
        ),
    ]
# Internal rule instantiated by the haskell_proto_toolchain macro below.
_protobuf_toolchain = rule(
    _protobuf_toolchain_impl,
    attrs = {
        "protoc": attr.label(
            executable = True,
            cfg = "host",
            allow_single_file = True,
            mandatory = True,
            doc = "protoc compiler",
        ),
        "plugin": attr.label(
            executable = True,
            cfg = "host",
            allow_single_file = True,
            mandatory = True,
            doc = "proto-lens-protoc plugin for protoc",
        ),
        "deps": attr.label_list(
            doc = "List of other Haskell libraries to be linked to protobuf libraries.",
        ),
        "prebuilt_deps": attr.string_list(
            doc = "Non-Bazel supplied Cabal dependencies for protobuf libraries.",
        ),
    },
)
def haskell_proto_toolchain(
        name,
        plugin,
        deps = [],
        prebuilt_deps = [],
        protoc = Label("@com_google_protobuf//:protoc"),
        **kwargs):
    """Declare a Haskell protobuf toolchain.

    You need at least one of these declared somewhere in your `BUILD` files
    for the `haskell_proto_library` rules to work. Once declared, you then
    need to *register* the toolchain using `register_toolchains` in your
    `WORKSPACE` file (see example below).

    Example:

      In a `BUILD` file:

      ```bzl
      haskell_proto_toolchain(
        name = "protobuf-toolchain",
        protoc = "@com_google_protobuf//:protoc",
        plugin = "@hackage-proto-lens-protoc//:bin/proto-lens-protoc",
        prebuilt_deps = [
          "base",
          "bytestring",
          "containers",
          "data-default-class",
          "lens-family",
          "lens-labels",
          "proto-lens",
          "text",
        ],
      )
      ```

      The `prebuilt_deps` and `deps` arguments allow to specify Haskell
      libraries to use to compile the auto-generated source files.

      In `WORKSPACE` you could have something like this:

      ```bzl
      http_archive(
        name = "com_google_protobuf",
        sha256 = "cef7f1b5a7c5fba672bec2a319246e8feba471f04dcebfe362d55930ee7c1c30",
        strip_prefix = "protobuf-3.5.0",
        urls = ["https://github.com/google/protobuf/archive/v3.5.0.zip"],
      )

      nixpkgs_package(
        name = "protoc_gen_haskell",
        repository = "@nixpkgs",
        attribute_path = "haskell.packages.ghc822.proto-lens-protoc",
      )

      register_toolchains(
        "//tests:ghc", # assuming you called your Haskell toolchain "ghc"
        "//tests:protobuf-toolchain",
      )
      ```
    """

    # Declare the implementation target, then wrap it in a native toolchain
    # of the rules_haskell protobuf toolchain type.
    impl_name = name + "-impl"
    _protobuf_toolchain(
        name = impl_name,
        plugin = plugin,
        deps = deps,
        prebuilt_deps = prebuilt_deps,
        protoc = protoc,
        visibility = ["//visibility:public"],
        **kwargs
    )

    native.toolchain(
        name = name,
        toolchain_type = "@io_tweag_rules_haskell//protobuf:toolchain",
        toolchain = ":" + impl_name,
        exec_compatible_with = [
            "@bazel_tools//platforms:x86_64",
        ],
    )

View file

@ -0,0 +1,234 @@
"""Providers exposed by the Haskell rules."""
load(
":private/path_utils.bzl",
"darwin_convert_to_dylibs",
"make_path",
"windows_convert_to_dlls",
)
HaskellCcInfo = provider(
    doc = "Haskell cc dependency information. Part of HaskellInfo.",
    fields = {
        "static_linking": """static linking mode parameters.
            A struct of
            (libraries_to_link, dynamic_libraries_for_runtime, user_link_flags).
            Libraries in libraries_to_link are struct(lib, mangled_lib)
            because the Darwin linker needs the original library path,
            while the Linux linker needs the mangled path.
            """,
        # BUG FIX: this field's doc said "static linking mode parameters",
        # a copy-paste of the field above; it describes dynamic linking.
        "dynamic_linking": """dynamic linking mode parameters.
            A struct of
            (libraries_to_link, dynamic_libraries_for_runtime, user_link_flags).
            Libraries in libraries_to_link are struct(lib, mangled_lib)
            because the Darwin linker needs the original library path,
            while the Linux linker needs the mangled path.
            """,
    },
)
def empty_HaskellCcInfo():
    """Create a HaskellCcInfo with no libraries or flags in either mode."""

    # Both linking modes share the same empty link context; structs and
    # depsets are immutable, so sharing is safe.
    no_link_ctx = struct(
        libraries_to_link = depset(order = "topological"),
        dynamic_libraries_for_runtime = depset(order = "topological"),
        user_link_flags = depset(order = "topological"),
    )
    return HaskellCcInfo(
        static_linking = no_link_ctx,
        dynamic_linking = no_link_ctx,
    )
def merge_HaskellCcInfo(*args):
    """Merge multiple HaskellCcInfo providers into one.

    Each field of each linking mode is merged transitively, preserving
    topological depset order.
    """
    merged = {}
    for mode in ["static_linking", "dynamic_linking"]:
        link_ctxs = [getattr(arg, mode) for arg in args]
        merged[mode] = struct(
            libraries_to_link = depset(
                order = "topological",
                transitive = [c.libraries_to_link for c in link_ctxs],
            ),
            dynamic_libraries_for_runtime = depset(
                order = "topological",
                transitive = [c.dynamic_libraries_for_runtime for c in link_ctxs],
            ),
            user_link_flags = depset(
                order = "topological",
                transitive = [c.user_link_flags for c in link_ctxs],
            ),
        )
    return HaskellCcInfo(
        static_linking = merged["static_linking"],
        dynamic_linking = merged["dynamic_linking"],
    )
# Central provider threaded through the Haskell rules; most fields are
# accumulated transitively over the dependency graph.
HaskellInfo = provider(
    doc = "Common information about build process: dependencies, etc.",
    fields = {
        "package_ids": "Set of all package ids of direct (non-prebuilt) dependencies.",
        "package_databases": "Set of package cache files.",
        "version_macros": "Set of version macro files.",
        "import_dirs": "Import hierarchy roots.",
        "source_files": "Set of files that contain Haskell modules.",
        "extra_source_files": "A depset of non-Haskell source files.",
        "static_libraries": "Ordered collection of compiled library archives.",
        "static_libraries_prof": "Ordered collection of static libraries with profiling.",
        "dynamic_libraries": "Set of dynamic libraries.",
        "interface_dirs": "Set of interface dirs belonging to the packages.",
        "compile_flags": "Arguments that were used to compile the code.",
        "prebuilt_dependencies": "Transitive collection of info of wired-in Haskell dependencies.",
        "direct_prebuilt_deps": "Set of info of direct prebuilt dependencies.",
        "cc_dependencies": "Direct cc library dependencies. See HaskellCcInfo.",
        "transitive_cc_dependencies": "Transitive cc library dependencies. See HaskellCcInfo.",
    },
)
def get_libs_for_ghc_linker(hs, transitive_cc_dependencies, path_prefix = None):
    """Collect all C library dependencies for GHC's builtin linker.

    GHC ships its own linker, used for Template Haskell, GHCi, doctests,
    etc. It is pickier than the system's dynamic linker: e.g. on MacOS it
    strictly requires dynamic libraries to carry a .dylib extension, and on
    Windows it requires .dll files.

    Args:
      hs: Haskell context.
      transitive_cc_dependencies: HaskellCcInfo provider.
      path_prefix: Prefix for paths in GHC environment variables.

    Returns:
      (library_deps, ld_library_deps, env)
      library_deps: List of library files suitable for GHC's builtin linker.
      ld_library_deps: List of library files that should be available for
        dynamic loading.
      env: A mapping of the environment variables LIBRARY_PATH and
        LD_LIBRARY_PATH to the corresponding values as expected by GHC.
    """
    dyn_link_ctx = transitive_cc_dependencies.dynamic_linking
    link_libs = dyn_link_ctx.libraries_to_link.to_list()
    runtime_libs = dyn_link_ctx.dynamic_libraries_for_runtime.to_list()

    if hs.toolchain.is_darwin:
        # GHC's builtin linker requires .dylib files on MacOS.
        library_deps = darwin_convert_to_dylibs(hs, link_libs)

        # ghc 8.4 additionally needs the link libraries at load time,
        # although 8.6 does not.
        ld_library_deps = library_deps + runtime_libs
    elif hs.toolchain.is_windows:
        # GHC's builtin linker requires .dll files on Windows.
        library_deps = windows_convert_to_dlls(hs, link_libs)

        # Mirror the Darwin case above: also expose the link libraries for
        # dynamic loading.
        ld_library_deps = library_deps + runtime_libs
    else:
        library_deps = link_libs
        ld_library_deps = runtime_libs

    # Windows uses ";" as the search-path separator; everywhere else the
    # make_path default applies.
    path_sep = ";" if hs.toolchain.is_windows else None
    library_path = make_path(
        library_deps,
        prefix = path_prefix,
        sep = path_sep,
    )
    ld_library_path = make_path(
        ld_library_deps,
        prefix = path_prefix,
        sep = path_sep,
    )

    # GHC's builtin linker/loader looks for libraries in the paths defined by
    # LIBRARY_PATH and LD_LIBRARY_PATH.
    # See https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ghci.html?highlight=library_path#extra-libraries
    # In certain cases it is not enough to specify LD_LIBRARY_PATH alone, and
    # libraries are only found if their path is included in LIBRARY_PATH.
    # See https://github.com/tweag/rules_haskell/pull/685
    env = {
        "LIBRARY_PATH": library_path,
        "LD_LIBRARY_PATH": ld_library_path,
    }

    return (library_deps, ld_library_deps, env)
# Attached only to haskell_library targets; identifies the GHC package that
# the library registers.
HaskellLibraryInfo = provider(
    doc = "Library-specific information.",
    fields = {
        "package_id": "Workspace unique package identifier.",
        "version": "Package version.",
    },
)

# Coverage instrumentation (HPC) metadata produced during compilation.
HaskellCoverageInfo = provider(
    doc = "Information about coverage instrumentation for Haskell files.",
    fields = {
        "coverage_data": "A list of coverage data containing which parts of Haskell source code are being tracked for code coverage.",
    },
)

# Describes a package that ships with GHC (or is otherwise pre-installed)
# rather than being built by rules_haskell.
HaskellPrebuiltPackageInfo = provider(
    doc = "Information about a prebuilt GHC package.",
    fields = {
        "package": "Package name",
        "id_file": "File containing package id",
        "version_macros_file": "C header file containing Cabal version macros",
    },
)

# Transitive Haddock documentation outputs, keyed by package id.
HaddockInfo = provider(
    doc = "Haddock information.",
    fields = {
        "package_id": "Package id, usually of the form name-version.",
        "transitive_html": "Dictionary from package id to html dirs.",
        "transitive_haddocks": "Dictionary from package id to Haddock files.",
    },
)

HaskellLintInfo = provider(
    doc = "Provider that collects files produced by linters",
    fields = {
        "outputs": "Set of linter log files.",
    },
)

HaskellProtobufInfo = provider(
    doc = "Provider that wraps providers of auto-generated Haskell libraries",
    fields = {
        "files": "files",
    },
)

# Produced by c2hs rules; lets dependent c2hs files find the generated
# .chi interface and the import root of the generated Haskell source.
C2hsLibraryInfo = provider(
    doc = "Information about c2hs dependencies.",
    fields = {
        "chi_file": "c2hs interface file",
        "import_dir": "Import directory containing generated Haskell source file.",
    },
)

# Everything needed to pass a GHC compiler plugin on the command line.
GhcPluginInfo = provider(
    doc = "Encapsulates GHC plugin dependencies and tools",
    fields = {
        "module": "Plugin entrypoint.",
        "deps": "Plugin dependencies.",
        "args": "Plugin options.",
        "tool_inputs": "Inputs required for plugin tools.",
        "tool_input_manifests": "Plugin tools input manifests.",
    },
)

View file

@ -0,0 +1,460 @@
"""Multi target Haskell REPL."""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:shell.bzl", "shell")
load("@io_tweag_rules_haskell//haskell:private/context.bzl", "haskell_context", "render_env")
load(
"@io_tweag_rules_haskell//haskell:private/path_utils.bzl",
"link_libraries",
"match_label",
"parse_pattern",
"target_unique_name",
)
load(
"@io_tweag_rules_haskell//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
"empty_HaskellCcInfo",
"get_libs_for_ghc_linker",
"merge_HaskellCcInfo",
)
load("@io_tweag_rules_haskell//haskell:private/set.bzl", "set")
# Per-target information for targets that the REPL loads by source.
HaskellReplLoadInfo = provider(
    doc = """Haskell REPL target information.

    Information to a Haskell target to load into the REPL as source.
    """,
    fields = {
        "source_files": "Set of files that contain Haskell modules.",
        "cc_dependencies": "Direct cc library dependencies. See HaskellCcInfo.",
        "compiler_flags": "Flags to pass to the Haskell compiler.",
        "repl_ghci_args": "Arbitrary extra arguments to pass to GHCi. This extends `compiler_flags` and `repl_ghci_args` from the toolchain",
    },
)

# Per-target information for targets that the REPL loads as pre-built
# packages (via -package-id / -package-db).
HaskellReplDepInfo = provider(
    doc = """Haskell REPL dependency information.

    Information to a Haskell target to load into the REPL as a built package.
    """,
    fields = {
        "package_ids": "Set of workspace unique package identifiers.",
        "package_databases": "Set of package cache files.",
    },
)

# Aggregated by the haskell_repl aspect while walking `deps`; keeps both
# load modes around so the rule can decide later which one to use per label.
HaskellReplCollectInfo = provider(
    doc = """Collect Haskell REPL information.

    Holds information to generate a REPL that loads some targets as source
    and some targets as built packages.
    """,
    fields = {
        "load_infos": "Dictionary from labels to HaskellReplLoadInfo.",
        "dep_infos": "Dictionary from labels to HaskellReplDepInfo.",
        "prebuilt_dependencies": "Transitive collection of info of wired-in Haskell dependencies.",
        "transitive_cc_dependencies": "Transitive cc library dependencies. See HaskellCcInfo.",
    },
)

# The final, merged decision: one combined load-by-source info and one
# combined load-as-package info.
HaskellReplInfo = provider(
    doc = """Haskell REPL information.

    Holds information to generate a REPL that loads a specific set of targets
    from source or as built packages.
    """,
    fields = {
        "load_info": "Combined HaskellReplLoadInfo.",
        "dep_info": "Combined HaskellReplDepInfo.",
        "prebuilt_dependencies": "Transitive collection of info of wired-in Haskell dependencies.",
        "transitive_cc_dependencies": "Transitive cc library dependencies. See HaskellCcInfo.",
    },
)
def _merge_HaskellReplLoadInfo(load_infos):
    """Combine several HaskellReplLoadInfo providers into a single one.

    Source file sets and cc dependencies are unioned; compiler and GHCi
    flags are concatenated in input order.
    """
    merged_sources = set.empty()
    merged_cc_info = empty_HaskellCcInfo()
    merged_compiler_flags = []
    merged_ghci_args = []

    for info in load_infos:
        set.mutable_union(merged_sources, info.source_files)
        merged_cc_info = merge_HaskellCcInfo(
            merged_cc_info,
            info.cc_dependencies,
        )
        merged_compiler_flags.extend(info.compiler_flags)
        merged_ghci_args.extend(info.repl_ghci_args)

    return HaskellReplLoadInfo(
        source_files = merged_sources,
        cc_dependencies = merged_cc_info,
        compiler_flags = merged_compiler_flags,
        repl_ghci_args = merged_ghci_args,
    )
def _merge_HaskellReplDepInfo(dep_infos):
    """Combine several HaskellReplDepInfo providers into a single one."""
    merged_ids = set.empty()
    merged_caches = set.empty()

    for info in dep_infos:
        set.mutable_union(merged_ids, info.package_ids)
        set.mutable_union(merged_caches, info.package_databases)

    return HaskellReplDepInfo(
        package_ids = merged_ids,
        package_databases = merged_caches,
    )
def _create_HaskellReplCollectInfo(target, ctx):
    """Build a HaskellReplCollectInfo describing a single target.

    Every target contributes a load-by-source entry; only library targets
    (those carrying HaskellLibraryInfo) additionally contribute a
    load-as-package entry.
    """
    hs_info = target[HaskellInfo]

    load_infos = {
        target.label: HaskellReplLoadInfo(
            source_files = hs_info.source_files,
            cc_dependencies = hs_info.cc_dependencies,
            compiler_flags = getattr(ctx.rule.attr, "compiler_flags", []),
            repl_ghci_args = getattr(ctx.rule.attr, "repl_ghci_args", []),
        ),
    }

    dep_infos = {}
    if HaskellLibraryInfo in target:
        lib_info = target[HaskellLibraryInfo]
        dep_infos[target.label] = HaskellReplDepInfo(
            package_ids = set.singleton(lib_info.package_id),
            package_databases = hs_info.package_databases,
        )

    return HaskellReplCollectInfo(
        load_infos = load_infos,
        dep_infos = dep_infos,
        prebuilt_dependencies = hs_info.prebuilt_dependencies,
        transitive_cc_dependencies = hs_info.transitive_cc_dependencies,
    )
def _merge_HaskellReplCollectInfo(args):
    """Combine several HaskellReplCollectInfo providers into one.

    Later entries win on label collisions in the load/dep dictionaries.
    """
    combined_load_infos = {}
    combined_dep_infos = {}
    combined_prebuilt = set.empty()
    combined_cc_info = empty_HaskellCcInfo()

    for info in args:
        combined_load_infos.update(info.load_infos)
        combined_dep_infos.update(info.dep_infos)
        set.mutable_union(
            combined_prebuilt,
            info.prebuilt_dependencies,
        )
        combined_cc_info = merge_HaskellCcInfo(
            combined_cc_info,
            info.transitive_cc_dependencies,
        )

    return HaskellReplCollectInfo(
        load_infos = combined_load_infos,
        dep_infos = combined_dep_infos,
        prebuilt_dependencies = combined_prebuilt,
        transitive_cc_dependencies = combined_cc_info,
    )
def _load_as_source(from_source, from_binary, lbl):
    """Whether a package should be loaded by source or as binary.

    The binary black-list takes precedence over the source white-list;
    a label matching neither is loaded as binary.
    """
    if any([match_label(pat, lbl) for pat in from_binary]):
        return False
    return any([match_label(pat, lbl) for pat in from_source])
def _create_HaskellReplInfo(from_source, from_binary, collect_info):
    """Convert a HaskellReplCollectInfo to a HaskellReplInfo.

    Args:
      from_source: List of patterns for packages to load by source.
      from_binary: List of patterns for packages to load as binary packages.
      collect_info: HaskellReplCollectInfo provider.

    Returns:
      HaskellReplInfo provider.
    """
    # Partition the collected targets: those matching the source patterns
    # are loaded by source, everything else as a built package.
    source_load_infos = []
    for lbl, load_info in collect_info.load_infos.items():
        if _load_as_source(from_source, from_binary, lbl):
            source_load_infos.append(load_info)

    binary_dep_infos = []
    for lbl, dep_info in collect_info.dep_infos.items():
        if not _load_as_source(from_source, from_binary, lbl):
            binary_dep_infos.append(dep_info)

    return HaskellReplInfo(
        load_info = _merge_HaskellReplLoadInfo(source_load_infos),
        dep_info = _merge_HaskellReplDepInfo(binary_dep_infos),
        prebuilt_dependencies = collect_info.prebuilt_dependencies,
        transitive_cc_dependencies = collect_info.transitive_cc_dependencies,
    )
def _create_repl(hs, ctx, repl_info, output):
    """Build a multi target REPL.

    Assembles a GHCi invocation: package flags for prebuilt and built
    dependencies, C library link flags, a generated GHCi script that loads
    the source files, and finally an executable wrapper script.

    Args:
      hs: Haskell context.
      ctx: Rule context.
      repl_info: HaskellReplInfo provider.
      output: The output for the executable REPL script.

    Returns:
      List of providers:
        DefaultInfo provider for the executable REPL script.
    """

    # The base and directory packages are necessary for the GHCi script we use
    # (loads source files and brings in scope the corresponding modules).
    args = ["-package", "base", "-package", "directory"]

    # Load prebuilt dependencies (-package)
    for dep in set.to_list(repl_info.prebuilt_dependencies):
        args.extend(["-package", dep.package])

    # Load built dependencies (-package-id, -package-db)
    for package_id in set.to_list(repl_info.dep_info.package_ids):
        args.extend(["-package-id", package_id])
    for package_cache in set.to_list(repl_info.dep_info.package_databases):
        # Package databases are resolved relative to the execroot at runtime;
        # $RULES_HASKELL_EXEC_ROOT is substituted by the wrapper script.
        args.extend([
            "-package-db",
            paths.join("$RULES_HASKELL_EXEC_ROOT", package_cache.dirname),
        ])

    # Load C library dependencies
    link_ctx = repl_info.load_info.cc_dependencies.dynamic_linking
    libs_to_link = link_ctx.dynamic_libraries_for_runtime.to_list()

    # External C libraries that we need to make available to the REPL.
    # NOTE(review): link_libraries also appends -l/-L flags to `args`
    # as a side effect — confirm against its definition in path_utils.bzl.
    libraries = link_libraries(libs_to_link, args)

    # Transitive library dependencies to have in runfiles.
    (library_deps, ld_library_deps, ghc_env) = get_libs_for_ghc_linker(
        hs,
        repl_info.transitive_cc_dependencies,
        path_prefix = "$RULES_HASKELL_EXEC_ROOT",
    )
    library_path = [paths.dirname(lib.path) for lib in library_deps]
    ld_library_path = [paths.dirname(lib.path) for lib in ld_library_deps]

    # Load source files
    # Force loading by source with `:add *...`.
    # See https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ghci.html#ghci-cmd-:add
    add_sources = [
        "*" + f.path
        for f in set.to_list(repl_info.load_info.source_files)
    ]
    ghci_repl_script = hs.actions.declare_file(
        target_unique_name(hs, "ghci-repl-script"),
    )
    hs.actions.expand_template(
        template = ctx.file._ghci_repl_script,
        output = ghci_repl_script,
        substitutions = {
            "{ADD_SOURCES}": " ".join(add_sources),
            "{COMMANDS}": "\n".join(ctx.attr.repl_ghci_commands),
        },
    )
    args += [
        "-ghci-script",
        paths.join("$RULES_HASKELL_EXEC_ROOT", ghci_repl_script.path),
    ]

    # Extra arguments.
    # `compiler flags` is the default set of arguments for the repl,
    # augmented by `repl_ghci_args`.
    # The ordering is important, first compiler flags (from toolchain
    # and local rule), then from `repl_ghci_args`. This way the more
    # specific arguments are listed last, and then have more priority in
    # GHC.
    # Note that most flags for GHCI do have their negative value, so a
    # negative flag in `repl_ghci_args` can disable a positive flag set
    # in `compiler_flags`, such as `-XNoOverloadedStrings` will disable
    # `-XOverloadedStrings`.
    quote_args = (
        hs.toolchain.compiler_flags +
        repl_info.load_info.compiler_flags +
        hs.toolchain.repl_ghci_args +
        repl_info.load_info.repl_ghci_args +
        ctx.attr.repl_ghci_args
    )

    # Render the wrapper script: environment setup, the ghci binary, and the
    # full (shell-quoted) argument list.
    hs.actions.expand_template(
        template = ctx.file._ghci_repl_wrapper,
        output = output,
        is_executable = True,
        substitutions = {
            "{ENV}": render_env(ghc_env),
            "{TOOL}": hs.tools.ghci.path,
            "{ARGS}": " ".join(
                args + [
                    shell.quote(a)
                    for a in quote_args
                ],
            ),
        },
    )

    # Everything the wrapper needs at runtime must be in runfiles.
    extra_inputs = [
        hs.tools.ghci,
        ghci_repl_script,
    ]
    extra_inputs.extend(set.to_list(repl_info.load_info.source_files))
    extra_inputs.extend(set.to_list(repl_info.dep_info.package_databases))
    extra_inputs.extend(library_deps)
    extra_inputs.extend(ld_library_deps)
    return [DefaultInfo(
        executable = output,
        runfiles = ctx.runfiles(
            files = extra_inputs,
            collect_data = ctx.attr.collect_data,
        ),
    )]
def _haskell_repl_aspect_impl(target, ctx):
    """Collect REPL information for a target and its Haskell `deps`."""
    if HaskellInfo not in target:
        return []

    infos = [_create_HaskellReplCollectInfo(target, ctx)]
    for dep in ctx.rule.attr.deps:
        if HaskellReplCollectInfo in dep:
            infos.append(dep[HaskellReplCollectInfo])

    # This aspect currently does not generate an executable REPL script by
    # itself. This could be extended in future. Note, to that end it's
    # necessary to construct a Haskell context without `ctx.attr.name`.
    return [_merge_HaskellReplCollectInfo(infos)]
haskell_repl_aspect = aspect(
    implementation = _haskell_repl_aspect_impl,
    # Walk the `deps` edges so transitive Haskell dependencies are collected.
    attr_aspects = ["deps"],
)
"""
Haskell REPL aspect.

Used to implement the haskell_repl rule. Does not generate an executable REPL
by itself.
"""
def _haskell_repl_impl(ctx):
    """Implementation of haskell_repl: emit an executable REPL script."""
    dep_collect_infos = []
    for dep in ctx.attr.deps:
        if HaskellReplCollectInfo in dep:
            dep_collect_infos.append(dep[HaskellReplCollectInfo])
    collect_info = _merge_HaskellReplCollectInfo(dep_collect_infos)

    source_patterns = [parse_pattern(ctx, pat) for pat in ctx.attr.experimental_from_source]
    binary_patterns = [parse_pattern(ctx, pat) for pat in ctx.attr.experimental_from_binary]
    repl_info = _create_HaskellReplInfo(source_patterns, binary_patterns, collect_info)

    hs = haskell_context(ctx)
    return _create_repl(hs, ctx, repl_info, ctx.outputs.repl)
haskell_repl = rule(
    implementation = _haskell_repl_impl,
    attrs = {
        # Template for the GHCi script that :add's the source files.
        "_ghci_repl_script": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:assets/ghci_script"),
        ),
        # Template for the executable wrapper around ghci.
        "_ghci_repl_wrapper": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:private/ghci_repl_wrapper.sh"),
        ),
        "deps": attr.label_list(
            aspects = [haskell_repl_aspect],
            doc = "List of Haskell targets to load into the REPL",
        ),
        "experimental_from_source": attr.string_list(
            doc = """White-list of targets to load by source.

            Wild-card targets such as //... or //:all are allowed.

            The black-list takes precedence over the white-list.

            Note, this attribute will change depending on the outcome of
            https://github.com/bazelbuild/bazel/issues/7763.
            """,
            default = ["//..."],
        ),
        "experimental_from_binary": attr.string_list(
            doc = """Black-list of targets to not load by source but as packages.

            Wild-card targets such as //... or //:all are allowed.

            The black-list takes precedence over the white-list.

            Note, this attribute will change depending on the outcome of
            https://github.com/bazelbuild/bazel/issues/7763.
            """,
            default = [],
        ),
        "repl_ghci_args": attr.string_list(
            doc = "Arbitrary extra arguments to pass to GHCi. This extends `compiler_flags` and `repl_ghci_args` from the toolchain",
            default = [],
        ),
        "repl_ghci_commands": attr.string_list(
            doc = "Arbitrary extra commands to execute in GHCi.",
            default = [],
        ),
        "collect_data": attr.bool(
            doc = "Whether to collect the data runfiles from the dependencies in srcs, data and deps attributes.",
            default = True,
        ),
    },
    executable = True,
    outputs = {
        "repl": "%{name}@repl",
    },
    toolchains = ["@io_tweag_rules_haskell//haskell:toolchain"],
)
"""Build a REPL for multiple targets.

Example:
  ```bzl
  haskell_repl(
      name = "repl",
      deps = [
          "//lib:some_lib",
          "//exe:some_exe",
      ],
      experimental_from_source = [
          "//lib/...",
          "//exe/...",
          "//common/...",
      ],
      experimental_from_binary = [
          "//lib/vendored/...",
      ],
  )
  ```

  Collects all transitive Haskell dependencies from `deps`. Those that match
  `experimental_from_binary` or are defined in an external workspace will be
  loaded as binary packages. Those that match `experimental_from_source` and
  are defined in the local workspace will be loaded by source.

  You can call the REPL like this:

  ```
  $ bazel run //:repl
  ```
"""

View file

@ -0,0 +1,17 @@
"""Workspace rules (repositories)"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def haskell_repositories():
    """Provide all repositories that are necessary for `rules_haskell` to
    function.
    """
    # Do not redefine a repository the user has already declared.
    already_defined = native.existing_rules().keys()
    if "bazel_skylib" in already_defined:
        return

    http_archive(
        name = "bazel_skylib",
        sha256 = "eb5c57e4c12e68c0c20bc774bfbc60a568e800d025557bc4ea022c6479acc867",
        strip_prefix = "bazel-skylib-0.6.0",
        urls = ["https://github.com/bazelbuild/bazel-skylib/archive/0.6.0.tar.gz"],
    )

View file

@ -0,0 +1,334 @@
"""Rules for defining toolchains"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":ghc_bindist.bzl", "haskell_register_ghc_bindists")
load(
":private/actions/compile.bzl",
"compile_binary",
"compile_library",
)
load(
":private/actions/link.bzl",
"link_binary",
"link_library_dynamic",
"link_library_static",
)
load(":private/actions/package.bzl", "package")
# Executables shipped with a GHC distribution; each must be found among the
# toolchain's `tools` files (see _haskell_toolchain_impl).
_GHC_BINARIES = ["ghc", "ghc-pkg", "hsc2hs", "haddock", "ghci", "runghc", "hpc"]
def _run_ghc(hs, cc, inputs, outputs, mnemonic, arguments, params_file = None, env = None, progress_message = None, input_manifests = None):
    """Run GHC as a Bazel action through a generated bash wrapper script.

    The GHC path, compile flags and extra arguments are each written to
    param files which the wrapper reads back line by line, so very long
    command lines do not hit OS argument-length limits.

    Args:
      hs: Haskell context.
      cc: CC toolchain info (provides the C compiler for -pgm* flags).
      inputs: Action inputs (list or depset).
      outputs: Action outputs.
      mnemonic: Action mnemonic; also used to name the generated files.
      arguments: An Args object with the GHC arguments proper.
      params_file: Optional extra param file appended to the command line.
      env: Optional environment override; defaults to hs.env.
      progress_message: Optional progress message for the action.
      input_manifests: Optional input manifests for the action.

    Returns:
      The Args object holding the GHC path and -pgm* flags.
    """
    if not env:
        env = hs.env

    args = hs.actions.args()
    args.add(hs.tools.ghc)

    # Do not use Bazel's CC toolchain on Windows, as it leads to linker and library compatibility issues.
    # XXX: We should also tether Bazel's CC toolchain to GHC's, so that we can properly mix Bazel-compiled
    # C libraries with Haskell targets.
    if not hs.toolchain.is_windows:
        args.add_all([
            # GHC uses C compiler for assembly, linking and preprocessing as well.
            "-pgma",
            cc.tools.cc,
            "-pgmc",
            cc.tools.cc,
            "-pgml",
            cc.tools.cc,
            "-pgmP",
            cc.tools.cc,
            # Setting -pgm* flags explicitly has the unfortunate side effect
            # of resetting any program flags in the GHC settings file. So we
            # restore them here. See
            # https://ghc.haskell.org/trac/ghc/ticket/7929.
            "-optc-fno-stack-protector",
            "-optP-E",
            "-optP-undef",
            "-optP-traditional",
        ])

    # Spill both argument sets to param files, one entry per line.
    compile_flags_file = hs.actions.declare_file("compile_flags_%s_%s" % (hs.name, mnemonic))
    extra_args_file = hs.actions.declare_file("extra_args_%s_%s" % (hs.name, mnemonic))
    args.set_param_file_format("multiline")
    arguments.set_param_file_format("multiline")
    hs.actions.write(compile_flags_file, args)
    hs.actions.write(extra_args_file, arguments)
    extra_inputs = [
        hs.tools.ghc,
        # Depend on the version file of the Haskell toolchain,
        # to ensure the version comparison check is run first.
        hs.toolchain.version_file,
        compile_flags_file,
        extra_args_file,
    ] + cc.files
    if params_file:
        params_file_src = params_file.path
        extra_inputs.append(params_file)
    else:
        params_file_src = "<(:)"  # a temporary file with no contents

    # Wrapper script: read the param files back into arrays and exec the
    # first compile_flags entry (the GHC binary) with everything else as
    # its arguments.
    script = """
export PATH=${PATH:-} # otherwise GCC fails on Windows
# this is equivalent to 'readarray'. We do not use 'readarray' in order to
# support older bash versions.
while IFS= read -r line; do compile_flags+=("$line"); done < %s
while IFS= read -r line; do extra_args+=("$line"); done < %s
while IFS= read -r line; do param_file_args+=("$line"); done < %s
"${compile_flags[@]}" "${extra_args[@]}" ${param_file_args+"${param_file_args[@]}"}
""" % (compile_flags_file.path, extra_args_file.path, params_file_src)
    ghc_wrapper_name = "ghc_wrapper_%s_%s" % (hs.name, mnemonic)
    ghc_wrapper = hs.actions.declare_file(ghc_wrapper_name)
    hs.actions.write(ghc_wrapper, script, is_executable = True)
    extra_inputs.append(ghc_wrapper)

    # `inputs` may arrive as a list or a depset; extend it accordingly.
    if type(inputs) == type(depset()):
        inputs = depset(extra_inputs, transitive = [inputs])
    else:
        inputs += extra_inputs
    hs.actions.run_shell(
        inputs = inputs,
        input_manifests = input_manifests,
        outputs = outputs,
        command = ghc_wrapper.path,
        mnemonic = mnemonic,
        progress_message = progress_message,
        env = env,
        arguments = [],
    )

    return args
def _haskell_toolchain_impl(ctx):
    """Implementation of the _haskell_toolchain rule.

    Locates every GHC binary in `tools`, registers a version-check action
    and a global package database dump, and returns a ToolchainInfo that
    exposes the tools and the compile/link action callbacks.
    """

    # Store the binaries of interest in ghc_binaries. A tool may be named
    # either `foo` or `foo-<version>`; the first match per tool wins.
    ghc_binaries = {}
    for tool in _GHC_BINARIES:
        for file in ctx.files.tools:
            if tool in ghc_binaries:
                continue
            basename_no_ext = paths.split_extension(file.basename)[0]
            if tool == basename_no_ext:
                ghc_binaries[tool] = file
            elif "%s-%s" % (tool, ctx.attr.version) == basename_no_ext:
                ghc_binaries[tool] = file
        if not tool in ghc_binaries:
            # NOTE(review): `tools` is a label_list, so `ctx.attr.tools.label`
            # looks wrong on this error path — confirm it formats as intended.
            fail("Cannot find {} in {}".format(tool, ctx.attr.tools.label))

    # Run a version check on the compiler.
    version_file = ctx.actions.declare_file("ghc-version")
    ghc = ghc_binaries["ghc"]
    ctx.actions.run_shell(
        inputs = [ghc],
        outputs = [version_file],
        mnemonic = "HaskellVersionCheck",
        command = """
{ghc} --numeric-version > {version_file}
if [[ "{expected_version}" != "$(< {version_file})" ]]
then
    echo ERROR: GHC version does not match expected version.
    echo Your haskell_toolchain specifies {expected_version},
    echo but you have $(< {version_file}) in your environment.
    exit 1
fi
""".format(
            ghc = ghc.path,
            version_file = version_file.path,
            expected_version = ctx.attr.version,
        ),
    )

    # Get the versions of every prebuilt package.
    ghc_pkg = ghc_binaries["ghc-pkg"]
    pkgdb_file = ctx.actions.declare_file("ghc-global-pkgdb")
    ctx.actions.run_shell(
        inputs = [ghc_pkg],
        outputs = [pkgdb_file],
        mnemonic = "HaskellPackageDatabaseDump",
        command = "{ghc_pkg} dump --global > {output}".format(
            ghc_pkg = ghc_pkg.path,
            output = pkgdb_file.path,
        ),
    )

    # struct fields cannot contain "-", so e.g. ghc-pkg becomes ghc_pkg.
    tools_struct_args = {
        name.replace("-", "_"): file
        for name, file in ghc_binaries.items()
    }

    locale_archive = None
    if ctx.attr.locale_archive != None:
        locale_archive = ctx.file.locale_archive

    return [
        platform_common.ToolchainInfo(
            name = ctx.label.name,
            tools = struct(**tools_struct_args),
            compiler_flags = ctx.attr.compiler_flags,
            repl_ghci_args = ctx.attr.repl_ghci_args,
            haddock_flags = ctx.attr.haddock_flags,
            locale = ctx.attr.locale,
            locale_archive = locale_archive,
            osx_cc_wrapper_tpl = ctx.file._osx_cc_wrapper_tpl,
            mode = ctx.var["COMPILATION_MODE"],
            actions = struct(
                compile_binary = compile_binary,
                compile_library = compile_library,
                link_binary = link_binary,
                link_library_dynamic = link_library_dynamic,
                link_library_static = link_library_static,
                package = package,
                run_ghc = _run_ghc,
            ),
            is_darwin = ctx.attr.is_darwin,
            is_windows = ctx.attr.is_windows,
            version = ctx.attr.version,
            # Pass through the version_file, that it can be required as
            # input in _run_ghc, to make every call to GHC depend on a
            # successful version check.
            version_file = version_file,
            global_pkg_db = pkgdb_file,
        ),
    ]
# Private rule wrapped by the haskell_toolchain macro below, which fills in
# the platform-dependent attributes (is_darwin, is_windows, locale_archive).
_haskell_toolchain = rule(
    _haskell_toolchain_impl,
    attrs = {
        "tools": attr.label_list(
            # Fixed typo/grammar in user-facing docs: "First item take
            # precedance" -> "The first item takes precedence".
            doc = "GHC and executables that come with it. The first item takes precedence.",
            mandatory = True,
        ),
        "compiler_flags": attr.string_list(
            doc = "A collection of flags that will be passed to GHC on every invocation.",
        ),
        "repl_ghci_args": attr.string_list(
            doc = "A collection of flags that will be passed to GHCI on repl invocation. It extends the `compiler_flags` collection. Flags set here have precedence over `compiler_flags`.",
        ),
        "haddock_flags": attr.string_list(
            doc = "A collection of flags that will be passed to haddock.",
        ),
        "version": attr.string(
            doc = "Version of your GHC compiler. It has to match the version reported by the GHC used by bazel.",
            mandatory = True,
        ),
        "is_darwin": attr.bool(
            doc = "Whether compile on and for Darwin (macOS).",
            mandatory = True,
        ),
        "is_windows": attr.bool(
            doc = "Whether compile on and for Windows.",
            mandatory = True,
        ),
        "locale": attr.string(
            default = "en_US.UTF-8",
            doc = "Locale that will be set during compiler invocations.",
        ),
        "locale_archive": attr.label(
            allow_single_file = True,
            doc = """
            Label pointing to the locale archive file to use. Mostly useful on NixOS.
            """,
        ),
        "_osx_cc_wrapper_tpl": attr.label(
            allow_single_file = True,
            default = Label("@io_tweag_rules_haskell//haskell:private/osx_cc_wrapper.sh.tpl"),
        ),
    },
)
def haskell_toolchain(
        name,
        version,
        tools,
        exec_compatible_with = None,
        target_compatible_with = None,
        compiler_flags = [],
        repl_ghci_args = [],
        haddock_flags = [],
        locale_archive = None,
        **kwargs):
    """Declare a compiler toolchain.

    You need at least one of these declared somewhere in your `BUILD` files
    for the other rules to work. Once declared, you then need to *register*
    the toolchain using `register_toolchains` in your `WORKSPACE` file (see
    example below).

    Example:

      In a `BUILD` file:

      ```bzl
      haskell_toolchain(
          name = "ghc",
          version = "1.2.3",
          tools = ["@sys_ghc//:bin"],
          compiler_flags = ["-Wall"],
      )
      ```

      where `@sys_ghc` is an external repository defined in the `WORKSPACE`,
      e.g. using:

      ```bzl
      nixpkgs_package(
          name = 'sys_ghc',
          attribute_path = 'haskell.compiler.ghc822',
      )

      register_toolchains("//:ghc")
      ```
    """

    # If only one constraint list is given, mirror it to the other so the
    # toolchain matches consistently on exec and target platforms.
    if exec_compatible_with and not target_compatible_with:
        target_compatible_with = exec_compatible_with
    elif target_compatible_with and not exec_compatible_with:
        exec_compatible_with = target_compatible_with

    impl_name = name + "-impl"

    # Always isolate GHCi from the user's per-user package database.
    corrected_ghci_args = repl_ghci_args + ["-no-user-package-db"]
    _haskell_toolchain(
        name = impl_name,
        version = version,
        tools = tools,
        compiler_flags = compiler_flags,
        repl_ghci_args = corrected_ghci_args,
        haddock_flags = haddock_flags,
        visibility = ["//visibility:public"],
        is_darwin = select({
            "@io_tweag_rules_haskell//haskell/platforms:darwin": True,
            "//conditions:default": False,
        }),
        is_windows = select({
            "@io_tweag_rules_haskell//haskell/platforms:mingw32": True,
            "//conditions:default": False,
        }),
        # Ignore this attribute on any platform that is not Linux. The
        # LOCALE_ARCHIVE environment variable is a Linux-specific
        # Nixpkgs hack.
        locale_archive = select({
            "@io_tweag_rules_haskell//haskell/platforms:linux": locale_archive,
            "//conditions:default": None,
        }),
        **kwargs
    )
    native.toolchain(
        name = name,
        toolchain_type = "@io_tweag_rules_haskell//haskell:toolchain",
        toolchain = ":" + impl_name,
        exec_compatible_with = exec_compatible_with,
        target_compatible_with = target_compatible_with,
    )
def haskell_register_toolchains(version):
    """Download a GHC binary distribution for the current platform and
    register it as a toolchain.

    This is currently equivalent to calling
    `haskell_register_ghc_bindists(version)` directly.
    """
    haskell_register_ghc_bindists(version)