Skip to content

Commit

Permalink
Add support for headers and dynamic base URL (philss#65)
Browse files Browse the repository at this point in the history
The :base_url attribute now accepts static headers or a custom function
that returns a URL and headers. This allows us to fetch NIFs from more
complicated sources, rather than just public GitHub releases.
  • Loading branch information
sliiser committed Jun 25, 2024
1 parent a5cd67e commit 374ab95
Show file tree
Hide file tree
Showing 3 changed files with 172 additions and 30 deletions.
68 changes: 41 additions & 27 deletions lib/rustler_precompiled.ex
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,16 @@ defmodule RustlerPrecompiled do
* `:crate` - The name of Rust crate if different from the `:otp_app`. This is optional.
* `:base_url` - A valid URL that is used as base path for the NIF file.
* `:base_url` - The location where the NIFs can be found. This should be one of the following:
* A URL to a directory containing the NIFs. The name of the NIF will be appended to it
and a GET request will be made. Works well with public GitHub releases.
* A tuple of `{url, headers}`. The headers should be a list of
key-value pairs. This is useful when the NIFs are hosted on a private server.
* A tuple of `{module, function}`. The function is given the NIF file name and should return a URL
or a tuple of `{url, headers}`. This should be used for all cases not covered by the above.
For example, when multiple requests have to be made, like when using a private GitHub release
through the GitHub API, or when the URLs don't resemble a simple directory.
* `:version` - The version of precompiled assets (it is part of the NIF filename).
Expand Down Expand Up @@ -263,7 +272,7 @@ defmodule RustlerPrecompiled do
@native_dir "priv/native"

@doc """
Returns URLs for NIFs based on its module name.
Returns tuples of file names and their URLs for NIFs based on its module name.
The module name is the one that defined the NIF and this information
is stored in a metadata file.
Expand Down Expand Up @@ -320,17 +329,15 @@ defmodule RustlerPrecompiled do
variants = Map.fetch!(variants, target_triple)

for variant <- variants do
tar_gz_file_url(
base_url,
lib_name_with_ext(target_triple, lib_name <> "--" <> Atom.to_string(variant))
)
lib_name = lib_name_with_ext(target_triple, lib_name <> "--" <> Atom.to_string(variant))
{lib_name, tar_gz_file_url(base_url, lib_name)}
end
end

defp maybe_variants_tar_gz_urls(_, _, _, _), do: []

@doc """
Returns the file URLs to be downloaded for current target.
Returns tuples of file names and their URLs to be downloaded for current target.
It is in the plural because a target may have some variants for it.
It receives the NIF module.
Expand Down Expand Up @@ -362,9 +369,10 @@ defmodule RustlerPrecompiled do

defp tar_gz_urls(base_url, basename, version, nif_version, target_triple, variants) do
lib_name = lib_name(basename, version, nif_version, target_triple)
lib_name_with_ext = lib_name_with_ext(target_triple, lib_name)

[
tar_gz_file_url(base_url, lib_name_with_ext(target_triple, lib_name))
{lib_name_with_ext, tar_gz_file_url(base_url, lib_name_with_ext(target_triple, lib_name))}
| maybe_variants_tar_gz_urls(variants, base_url, target_triple, lib_name)
]
end
Expand Down Expand Up @@ -615,7 +623,7 @@ defmodule RustlerPrecompiled do

# `cache_base_dir` is a "private" option used only in tests.
cache_dir = cache_dir(config.base_cache_dir, "precompiled_nifs")
cached_tar_gz = Path.join(cache_dir, "#{file_name}.tar.gz")
cached_tar_gz = Path.join(cache_dir, file_name)

{:ok,
Map.merge(basic_metadata, %{
Expand Down Expand Up @@ -840,21 +848,34 @@ defmodule RustlerPrecompiled do
"so"
end

"#{lib_name}.#{ext}"
"#{lib_name}.#{ext}.tar.gz"
end

defp tar_gz_file_url(base_url, file_name) do
defp tar_gz_file_url({module, function_name}, file_name)
when is_atom(module) and is_atom(function_name) do
apply(module, function_name, [file_name])
end

defp tar_gz_file_url({base_url, request_headers}, file_name) do
uri = URI.parse(base_url)

uri =
Map.update!(uri, :path, fn path ->
Path.join(path || "", "#{file_name}.tar.gz")
Path.join(path || "", file_name)
end)

to_string(uri)
{to_string(uri), request_headers}
end

defp download_nif_artifact(url) do
defp tar_gz_file_url(base_url, file_name) do
tar_gz_file_url({base_url, []}, file_name)
end

defp download_nif_artifact(url) when is_binary(url) do
download_nif_artifact({url, []})
end

defp download_nif_artifact({url, request_headers}) do
url = String.to_charlist(url)
Logger.debug("Downloading NIF from #{url}")

Expand Down Expand Up @@ -895,7 +916,7 @@ defmodule RustlerPrecompiled do

options = [body_format: :binary]

case :httpc.request(:get, {url, []}, http_options, options) do
case :httpc.request(:get, {url, request_headers}, http_options, options) do
{:ok, {{_, 200, _}, _headers, body}} ->
{:ok, body}

Expand All @@ -912,16 +933,17 @@ defmodule RustlerPrecompiled do
attempts = max_retries(options)

download_results =
for url <- urls, do: {url, with_retry(fn -> download_nif_artifact(url) end, attempts)}
for {lib_name, url} <- urls,
do: {lib_name, with_retry(fn -> download_nif_artifact(url) end, attempts)}

cache_dir = cache_dir("precompiled_nifs")
:ok = File.mkdir_p(cache_dir)

Enum.flat_map(download_results, fn result ->
with {:download, {url, download_result}} <- {:download, result},
with {:download, {lib_name, download_result}} <- {:download, result},
{:download_result, {:ok, body}} <- {:download_result, download_result},
hash <- :crypto.hash(@checksum_algo, body),
path <- Path.join(cache_dir, basename_from_url(url)),
path <- Path.join(cache_dir, lib_name),
{:file, :ok} <- {:file, File.write(path, body)} do
checksum = Base.encode16(hash, case: :lower)

Expand All @@ -931,7 +953,7 @@ defmodule RustlerPrecompiled do

[
%{
url: url,
lib_name: lib_name,
path: path,
checksum: checksum,
checksum_algo: @checksum_algo
Expand Down Expand Up @@ -985,14 +1007,6 @@ defmodule RustlerPrecompiled do
end)
end

defp basename_from_url(url) do
uri = URI.parse(url)

uri.path
|> String.split("/")
|> List.last()
end

defp read_map_from_file(file) do
with {:ok, contents} <- File.read(file),
{%{} = contents, _} <- Code.eval_string(contents) do
Expand Down
22 changes: 20 additions & 2 deletions lib/rustler_precompiled/config.ex
Original file line number Diff line number Diff line change
Expand Up @@ -83,16 +83,34 @@ defmodule RustlerPrecompiled.Config do

defp validate_base_url!(nil), do: raise_for_nil_field_value(:base_url)

defp validate_base_url!(base_url) do
defp validate_base_url!({base_url, headers}) when is_binary(base_url) and is_list(headers) do
case :uri_string.parse(base_url) do
%{} ->
base_url
if Enum.all?(headers, &match?({key, value} when is_list(key) and is_binary(value), &1)) do
{base_url, headers}
else
raise "`:base_url` for `RustlerPrecompiled` must be a list of `{charlist(),binary()}`"
end

{:error, :invalid_uri, error} ->
raise "`:base_url` for `RustlerPrecompiled` is invalid: #{inspect(to_string(error))}"
end
end

# Validates the `{module, function}` form of `:base_url`: the module must be
# compilable and must export `function/1` (the function receives the NIF file
# name and returns a URL or a `{url, headers}` tuple).
defp validate_base_url!({module, function}) when is_atom(module) and is_atom(function) do
  # Raises if the module cannot be found or compiled.
  Code.ensure_compiled!(module)

  if not function_exported?(module, function, 1) do
    raise "`:base_url` for `RustlerPrecompiled` is a function that does not exist: #{inspect(module)}.#{function}"
  end

  {module, function}
end

# A bare URL string is shorthand for the same URL with no extra headers.
defp validate_base_url!(base_url) when is_binary(base_url),
  do: validate_base_url!({base_url, []})

defp validate_list!(nil, option, _valid_values), do: raise_for_nil_field_value(option)

defp validate_list!([_ | _] = values, option, valid_values) do
Expand Down
112 changes: 111 additions & 1 deletion test/rustler_precompiled_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,111 @@ defmodule RustlerPrecompiledTest do
end)
end

# Exercises the `{url, headers}` form of `:base_url`: the static
# "authorization" header must accompany the GET request, otherwise the
# Bypass server answers 401 and the download fails.
@tag :tmp_dir
test "a project downloading precompiled NIFs with custom header", %{
  tmp_dir: tmp_dir,
  checksum_sample: checksum_sample,
  nif_fixtures_dir: nif_fixtures_dir
} do
  bypass = Bypass.open()

  in_tmp(tmp_dir, fn ->
    # The checksum file must exist so the downloaded artifact can be verified.
    File.write!("checksum-Elixir.RustlerPrecompilationExample.Native.exs", checksum_sample)

    Bypass.expect_once(bypass, fn conn ->
      # Serve the requested fixture only when the configured header is present.
      file_name = List.last(conn.path_info)
      file = File.read!(Path.join([nif_fixtures_dir, "precompiled_nifs", file_name]))

      if Plug.Conn.get_req_header(conn, "authorization") == ["Token 123"] do
        Plug.Conn.resp(conn, 200, file)
      else
        Plug.Conn.resp(conn, 401, "Unauthorized")
      end
    end)

    result =
      capture_log(fn ->
        config = %RustlerPrecompiled.Config{
          otp_app: :rustler_precompiled,
          module: RustlerPrecompilationExample.Native,
          base_cache_dir: tmp_dir,
          # Static headers passed alongside the base URL.
          base_url:
            {"http://localhost:#{bypass.port}/download", [{'authorization', "Token 123"}]},
          version: "0.2.0",
          crate: "example",
          targets: @available_targets,
          nif_versions: @default_nif_versions
        }

        {:ok, metadata} = RustlerPrecompiled.build_metadata(config)

        assert {:ok, result} = RustlerPrecompiled.download_or_reuse_nif_file(config, metadata)

        assert result.load?
        assert {:rustler_precompiled, path} = result.load_from

        assert path =~ "priv/native"
        assert path =~ "example-v0.2.0-nif"
      end)

    # The captured log should show an actual download happened (not a cache hit).
    assert result =~ "Downloading"
    assert result =~ "http://localhost:#{bypass.port}/download"
    assert result =~ "NIF cached at"
  end)
end

# Exercises the `{module, function}` form of `:base_url`: the callback
# (`url_with_headers/1` in this module) returns both the URL and the headers.
# The Bypass port is pinned to 1234 because the callback hard-codes that port.
@tag :tmp_dir
test "a project downloading precompiled NIFs with custom handler", %{
  tmp_dir: tmp_dir,
  checksum_sample: checksum_sample,
  nif_fixtures_dir: nif_fixtures_dir
} do
  bypass = Bypass.open(port: 1234)

  in_tmp(tmp_dir, fn ->
    # The checksum file must exist so the downloaded artifact can be verified.
    File.write!("checksum-Elixir.RustlerPrecompilationExample.Native.exs", checksum_sample)

    Bypass.expect_once(bypass, fn conn ->
      # The callback passes the file name as a query parameter rather than a path segment.
      %{"file_name" => file_name} = URI.decode_query(conn.query_string)
      file = File.read!(Path.join([nif_fixtures_dir, "precompiled_nifs", file_name]))

      # Serve the fixture only when the header produced by the callback is present.
      if Plug.Conn.get_req_header(conn, "authorization") == ["Token 123"] do
        Plug.Conn.resp(conn, 200, file)
      else
        Plug.Conn.resp(conn, 401, "Unauthorized")
      end
    end)

    result =
      capture_log(fn ->
        config = %RustlerPrecompiled.Config{
          otp_app: :rustler_precompiled,
          module: RustlerPrecompilationExample.Native,
          base_cache_dir: tmp_dir,
          # Dynamic `{module, function}` base URL resolved per NIF file name.
          base_url: {__MODULE__, :url_with_headers},
          version: "0.2.0",
          crate: "example",
          targets: @available_targets,
          nif_versions: @default_nif_versions
        }

        {:ok, metadata} = RustlerPrecompiled.build_metadata(config)

        assert {:ok, result} = RustlerPrecompiled.download_or_reuse_nif_file(config, metadata)

        assert result.load?
        assert {:rustler_precompiled, path} = result.load_from

        assert path =~ "priv/native"
        assert path =~ "example-v0.2.0-nif"
      end)

    # The captured log should show an actual download happened (not a cache hit).
    assert result =~ "Downloading"
    assert result =~ "http://localhost:#{bypass.port}/download"
    assert result =~ "NIF cached at"
  end)
end

@tag :tmp_dir
test "a project downloading precompiled NIFs with retry", %{
tmp_dir: tmp_dir,
Expand Down Expand Up @@ -910,7 +1015,7 @@ defmodule RustlerPrecompiledTest do
"libexample-v0.2.0-nif-2.17-x86_64-unknown-linux-gnu.so.tar.gz",
"libexample-v0.2.0-nif-2.17-x86_64-unknown-linux-musl.so.tar.gz"
]
|> Enum.map(fn file_name -> "#{base_url}/#{file_name}" end)
|> Enum.map(fn file_name -> {file_name, {"#{base_url}/#{file_name}", []}} end)
end

test "does not build list of tar gz urls due to missing metadata field" do
Expand Down Expand Up @@ -955,4 +1060,9 @@ defmodule RustlerPrecompiledTest do
|> Base.encode64()
|> binary_part(0, len)
end

# Helper used as a `{module, function}` style `:base_url`. Given the NIF file
# name, returns a `{url, headers}` tuple pointing at the Bypass server, which
# the "custom handler" test pins to port 1234. The header key is a charlist
# as required by `:httpc`.
def url_with_headers(file_name) do
  url = "http://localhost:1234/download?file_name=#{file_name}&foo=bar"
  {url, [{'authorization', "Token 123"}]}
end
end

0 comments on commit 374ab95

Please sign in to comment.