Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Proof of Concept DSL for Entities and Sections - WIP #48

Open
wants to merge 17 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
17 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions lib/spark.ex
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,26 @@ defmodule Spark do
Documentation for `Spark`.
"""

use Spark.Dsl,
default_extensions: [extensions: Spark.Dsl.Internal]

@doc """
Returns every entity declared in the `:top_level` section of `dsl`.
"""
def load(dsl), do: Spark.Dsl.Extension.get_entities(dsl, [:top_level])

@doc """
Returns the `:top_level` sections whose names appear in `chosen_sections`,
in the requested order. A name with no matching section yields `nil`.
"""
def load(dsl, chosen_sections) do
  # Index all declared sections by name, then pick the requested ones.
  section_map =
    dsl
    |> Spark.Dsl.Extension.get_entities([:top_level])
    |> Map.new(&{&1.name, &1})

  Enum.map(chosen_sections, &Map.get(section_map, &1))
end

@doc """
Returns all modules that implement the specified behaviour for a given otp_app.

Expand Down
6 changes: 4 additions & 2 deletions lib/spark/dsl/entity.ex
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,8 @@ defmodule Spark.Dsl.Entity do
no_depend_modules: [],
schema: [],
auto_set_fields: [],
docs: ""
docs: "",
referenced_as: nil
]

alias Spark.{
Expand All @@ -100,7 +101,7 @@ defmodule Spark.Dsl.Entity do
@typedoc """
A keyword list of nested entities.
"""
@type entities :: Keyword.t(t)
@type entities :: Keyword.t([t])

@typedoc """
Specifies a function that will run on the target struct after building.
Expand Down Expand Up @@ -205,6 +206,7 @@ defmodule Spark.Dsl.Entity do
name: name(),
no_depend_modules: no_depend_modules(),
recursive_as: recursive_as(),
referenced_as: atom() | nil,
schema: OptionsHelpers.schema(),
singleton_entity_keys: singleton_entity_keys(),
snippet: snippet(),
Expand Down
137 changes: 137 additions & 0 deletions lib/spark/dsl/internal.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
defmodule Spark.Dsl.Internal do
  # A "DSL for defining DSLs": exposes a `:top_level` section whose `entity`
  # and `section` builders produce `Spark.Dsl.Entity` / `Spark.Dsl.Section`
  # structs. Wired up as an extension via `use Spark.Dsl` in lib/spark.ex.
  @moduledoc false

  defmodule Schema do
    @moduledoc false
    # Temporary container for the `field` entries declared inside a `schema`
    # block; unwrapped to a plain keyword list by `schema/1` below.
    defstruct [:fields]
  end

  defmodule Field do
    @moduledoc false
    # One schema option; converted to a `{name, opts}` pair by `field/1`.
    defstruct [:name, :type, :required, doc: ""]
  end

  # DSL builder: `field :name, type: ..., required: ...`
  @field %Spark.Dsl.Entity{
    name: :field,
    target: Field,
    args: [:name],
    schema: [
      name: [type: :atom, required: true],
      type: [type: :any, required: true],
      required: [type: :boolean, required: true],
      doc: [type: :string, required: false]
    ],
    transform: {Spark.Dsl.Internal, :field, []}
  }

  # Transform hook for `@field`: turn a %Field{} into the `{name, options}`
  # shape used in option schemas (every struct key except :name becomes an
  # option).
  def field(struct) do
    %{name: name} = struct
    options = struct |> Map.drop([:name]) |> Map.from_struct() |> Map.to_list()
    {:ok, {name, options}}
  end

  # DSL builder: `schema do field ... end` — collects fields under :fields.
  @schema %Spark.Dsl.Entity{
    name: :schema,
    target: Schema,
    entities: [fields: [@field]],
    transform: {Spark.Dsl.Internal, :schema, []}
  }

  # Transform hook for `@schema`: unwrap %Schema{} to its bare field list.
  def schema(struct) do
    %Schema{fields: fields} = struct
    {:ok, fields}
  end

  # DSL builder: `entity do ... end`. The option list mirrors the keys of
  # Spark.Dsl.Entity; a nested singleton `schema` block supplies the entity's
  # own option schema.
  @entity %Spark.Dsl.Entity{
    name: :entity,
    target: Spark.Dsl.Entity,
    entities: [schema: [@schema]],
    singleton_entity_keys: [:schema],
    schema: [
      args: [type: :any, required: false],
      auto_set_fields: [type: :keyword_list, required: false],
      deprecations: [type: :any, required: false],
      describe: [type: :string, required: false],
      entities: [type: :keyword_list, required: false],
      examples: [type: :any, required: false],
      hide: [type: :any, required: false],
      identifier: [type: :any, required: false],
      imports: [type: :any, required: false],
      links: [type: :any, required: false],
      modules: [type: :any, required: false],
      name: [type: :atom, required: true],
      no_depend_modules: [type: :any, required: false],
      recursive_as: [type: :any, required: false],
      referenced_as: [type: :atom, required: false],
      singleton_entity_keys: [type: :any, required: false],
      snippet: [type: :any, required: false],
      target: [type: :module, required: true],
      transform: [type: :any, required: false]
    ],
    transform: {Spark.Dsl.Internal, :entity, []}
  }

  # Transform hook for `@entity`: default :referenced_as to the entity's own
  # name so later transformers can always resolve references by that key.
  def entity(struct) do
    updated_struct =
      case struct.referenced_as do
        nil -> %{struct | referenced_as: struct.name}
        _ -> struct
      end

    {:ok, updated_struct}
  end

  # DSL builder: `section do ... end`. The option list mirrors the keys of
  # Spark.Dsl.Section; :entities/:sections accept atoms naming previously
  # declared items.
  @section %Spark.Dsl.Entity{
    name: :section,
    target: Spark.Dsl.Section,
    entities: [schema: [@schema]],
    singleton_entity_keys: [:schema],
    schema: [
      auto_set_fields: [type: :any, required: false],
      deprecations: [type: :any, required: false],
      describe: [type: :string, required: false],
      entities: [type: {:list, :atom}, required: false],
      examples: [type: :any, required: false],
      imports: [type: :any, required: false],
      links: [type: :any, required: false],
      modules: [type: :any, required: false],
      name: [type: :atom, required: true],
      no_depend_modules: [type: :any, required: false],
      patchable?: [type: :any, required: false],
      referenced_as: [type: :atom, required: false],
      sections: [type: {:list, :atom}, required: false],
      snippet: [type: :any, required: false],
      top_level?: [type: :boolean, required: false]
    ],
    transform: {Spark.Dsl.Internal, :section, []}
  }

  # Transform hook for `@section`: default :referenced_as (as in entity/1)
  # and normalize a nil schema to [].
  def section(struct) do
    updated_struct =
      case struct.referenced_as do
        nil -> %{struct | referenced_as: struct.name}
        _ -> struct
      end

    updated_struct =
      # NOTE(review): reads struct.schema rather than updated_struct.schema —
      # the value is identical here (only :referenced_as changed above), but
      # updated_struct would read more clearly.
      case struct.schema do
        nil -> %{updated_struct | schema: []}
        _ -> updated_struct
      end

    {:ok, updated_struct}
  end

  # The single top-level section exposing the builders above.
  @top_level %Spark.Dsl.Section{
    name: :top_level,
    entities: [
      @entity,
      @section
    ],
    top_level?: true
  }

  # NOTE(review): the transformer's module name misspells "Entities"; renaming
  # it requires a coordinated change to its defmodule as well.
  use Spark.Dsl.Extension,
    sections: [@top_level],
    transformers: [Spark.Dsl.Internal.InsertEntitesIntoSections]
end
109 changes: 109 additions & 0 deletions lib/spark/dsl/internal/insert_entities_into_sections.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
defmodule Spark.Dsl.Internal.InsertEntitesIntoSections do
  # Transformer that rewrites the flat `[:top_level]` declarations produced by
  # Spark.Dsl.Internal: atom references inside an entity's :entities and a
  # section's :entities/:sections are replaced by the structs they name,
  # yielding fully nested sections.
  #
  # NOTE(review): the module name misspells "Entities"; fixing it needs a
  # coordinated rename with the reference in Spark.Dsl.Internal.
  @moduledoc false
  use Spark.Dsl.Transformer
  alias Spark.Dsl.Transformer

  # Entry point invoked by the DSL compiler; returns {:ok, updated_dsl}.
  def transform(dsl) do
    updated_dsl =
      Transformer.replace_entities(dsl, [:top_level], [], &substitute_entities_into_sections/1)

    {:ok, updated_dsl}
  end

  # Splits the top-level list into entity/section lookup maps keyed by
  # :referenced_as, resolves all nested references, and returns the sections.
  defp substitute_entities_into_sections(entities_and_sections) do
    {entities, sections} =
      Enum.reduce(entities_and_sections, {%{}, %{}}, fn
        %Spark.Dsl.Entity{referenced_as: ref} = entity, {entities, sections} ->
          {Map.put(entities, ref, entity), sections}

        %Spark.Dsl.Section{referenced_as: ref} = section, {entities, sections} ->
          {entities, Map.put(sections, ref, section)}
      end)

    entities = handle_entities(entities)

    # Swap each section's atom entity references for the resolved structs.
    # NOTE(review): an unknown reference resolves to nil here and surfaces
    # later as a BadMapError; a descriptive error would be friendlier.
    sections =
      Map.new(sections, fn {key, section} ->
        {key, Map.put(section, :entities, Enum.map(section.entities, &Map.get(entities, &1)))}
      end)

    sections
    |> handle_sections()
    |> Map.values()
  end

  # Resolves references inside a keyword list of child entities, threading the
  # updated entity lookup map through. (Previously a reduce with quadratic
  # `acc ++ [x]` appends; Enum.map_reduce/3 preserves order in linear time.)
  def handle_entity_children(keyword_list, entities) do
    Enum.map_reduce(keyword_list, entities, fn {key, value}, entities ->
      {result, entities} = handle_entity_node(value, entities)
      {{key, result}, entities}
    end)
  end

  # Resolves one node: an atom (a reference to a declared entity), a list of
  # nodes, or an inline entity. Returns {resolved_node, updated_lookup_map}.
  def handle_entity_node(entity, entities) do
    case entity do
      atom when is_atom(atom) ->
        resolved = Map.get(entities, atom)
        {children, entities} = handle_entity_children(Map.get(resolved, :entities), entities)
        resolved = Map.put(resolved, :entities, children)
        {resolved, Map.put(entities, resolved.referenced_as, resolved)}

      list when is_list(list) ->
        Enum.map_reduce(list, entities, &handle_entity_node/2)

      entity ->
        {children, entities} = handle_entity_children(Map.get(entity, :entities), entities)
        entity = Map.put(entity, :entities, children)
        {entity, Map.put(entities, entity.referenced_as, entity)}
    end
  end

  # Resolves every entity in the map, storing each resolved entity back into
  # the lookup map under its :referenced_as key as it goes.
  def handle_entities(entity_map) do
    Enum.reduce(entity_map, entity_map, fn {_key, value}, state ->
      {_, state} = handle_entity_node(value, state)
      state
    end)
  end

  # Same as handle_entity_children/2, but for nested sections.
  def handle_section_children(list, sections) do
    Enum.map_reduce(list, sections, &handle_section_node/2)
  end

  # Resolves one section node: an atom reference or an inline section struct.
  def handle_section_node(section, sections) do
    case section do
      atom when is_atom(atom) ->
        resolved = Map.get(sections, atom)
        {children, sections} = handle_section_children(Map.get(resolved, :sections), sections)
        resolved = Map.put(resolved, :sections, children)
        {resolved, Map.put(sections, resolved.referenced_as, resolved)}

      section ->
        {children, sections} = handle_section_children(Map.get(section, :sections), sections)
        section = Map.put(section, :sections, children)
        {section, Map.put(sections, section.referenced_as, section)}
    end
  end

  # Resolves every section in the map, storing resolved sections back into the
  # lookup map as handle_entities/1 does for entities.
  def handle_sections(section_map) do
    Enum.reduce(section_map, section_map, fn {_key, value}, state ->
      {_, state} = handle_section_node(value, state)
      state
    end)
  end
end
4 changes: 3 additions & 1 deletion lib/spark/dsl/section.ex
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@ defmodule Spark.Dsl.Section do
entities: [],
sections: [],
docs: "",
patchable?: false
patchable?: false,
referenced_as: nil
]

alias Spark.{
Expand All @@ -61,6 +62,7 @@ defmodule Spark.Dsl.Section do
no_depend_modules: [atom],
auto_set_fields: Keyword.t(any),
entities: [Entity.t()],
referenced_as: atom() | nil,
sections: [Section.t()],
docs: String.t(),
patchable?: boolean
Expand Down
6 changes: 6 additions & 0 deletions lib/spark/dsl/transformer.ex
Original file line number Diff line number Diff line change
Expand Up @@ -255,6 +255,12 @@ defmodule Spark.Dsl.Transformer do
end)
end

# Replaces the entity list stored under `path` in the DSL state by applying
# `replace_function` to the existing list; when `path` has no config yet, a
# fresh config is created with `replacement` as its entities and empty opts.
def replace_entities(dsl_state, path, replacement, replace_function) do
  case dsl_state do
    %{^path => config} ->
      %{dsl_state | path => Map.update(config, :entities, replacement, replace_function)}

    _ ->
      Map.put(dsl_state, path, %{entities: replacement, opts: []})
  end
end

def replace_entity(dsl_state, path, replacement, matcher \\ nil) do
matcher =
matcher ||
Expand Down
Loading