From 010c630f81759b5864c31f468643a77144ef3a9f Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Sat, 1 Apr 2023 10:31:32 +0200 Subject: [PATCH 1/9] feat: Add JET.jl tests --- test/Project.toml | 1 + test/code_analysis_tests/test_jet.jl | 26 ++++++++++++++++++++++++++ test/runtests.jl | 3 +++ 3 files changed, 30 insertions(+) create mode 100644 test/code_analysis_tests/test_jet.jl diff --git a/test/Project.toml b/test/Project.toml index 7d9f2f7..6744bdf 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -1,6 +1,7 @@ [deps] AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" Gumbo = "708ec375-b3d6-5a57-a7ce-8257bf98657a" +JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b" JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl new file mode 100644 index 0000000..5e86914 --- /dev/null +++ b/test/code_analysis_tests/test_jet.jl @@ -0,0 +1,26 @@ +using JET +@testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin + +payload = open(Norg.NORG_SPEC_PATH, "r") do f + read(f, String) +end + +# Error analysis + +# Parse the entire spec +@test_call mode=:sound norg(payload) +ast = norg(payload) +# HTML codegen +@test_call mode=:sound Norg.codegen(HTMLTarget(), ast) +# JSON codegen +@test_call mode=:sound Norg.codegen(JSONTarget(), ast) + +# Optimization analysis +# Parsing +@test_opt norg(payload) +# Codegen +@test_opt Norg.codegen(HTMLTarget(), payload) +@test_opt Norg.codegen(JSONTarget(), payload) +end + + diff --git a/test/runtests.jl b/test/runtests.jl index 186ca7f..ab00b2a 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -23,4 +23,7 @@ import Norg: @K_str, kind, value include("codegen_tests/html.jl") include("codegen_tests/json.jl") end + @testset "code analysis" begin + include("code_analysis_tests/test_jet.jl") + end end From 70afb64a405023ccedd450bee921a562c42f6a38 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Sun, 2 Apr 2023 10:20:32 +0200 Subject: [PATCH 2/9] feat: Make most of the code comply with JET.jl test set. 
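
The changes in this patch follow a few recurring patterns that show up in the diff
below: debug logging is dropped from hot paths, untyped empty containers such as
[] are replaced with concretely typed ones such as AST.Node[], the integer-level
heading helper is renamed to heading_kind so it no longer collides with
heading_level(::Kind), and JSON link-location codegen gets a dedicated
JSONLocationTarget so every location method returns a String.

A minimal sketch of the container-typing pattern, with made-up names (illustration
only, not part of the diff itself):

    struct Leaf
        value::Int
    end

    # Untyped accumulator: out is a Vector{Any}, so callers lose all type
    # information and JET's sound mode flags possible errors downstream.
    function collect_values_any(leaves::Vector{Leaf})
        out = []
        for l in leaves
            push!(out, l.value)
        end
        out
    end

    # Concretely typed accumulator: inference knows this returns Vector{Int},
    # which is the shape of change applied throughout this patch.
    function collect_values_typed(leaves::Vector{Leaf})
        out = Int[]
        for l in leaves
            push!(out, l.value)
        end
        out
    end
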
--- src/ast.jl | 4 +- src/codegen/html.jl | 2 +- src/codegen/json.jl | 29 +++++++----- src/match/attached_modifiers.jl | 7 +-- src/match/detached_modifier_suffix.jl | 3 -- src/match/detached_modifiers.jl | 59 +++++++++++------------- src/match/match.jl | 29 ++++-------- src/match/rangeable_detached_modifier.jl | 9 ---- src/match/tags.jl | 4 -- src/parser/attachedmodifier.jl | 2 - src/parser/detachedmodifierextensions.jl | 2 +- src/parser/detachedmodifiersuffix.jl | 18 ++++---- src/parser/link.jl | 37 ++++++++------- src/parser/nestablemodifier.jl | 2 - src/parser/parser.jl | 5 +- src/parser/rangeabledetachedmodifier.jl | 13 +----- src/parser/structuralmodifier.jl | 3 +- src/parser/tag.jl | 14 +----- src/tokens.jl | 2 +- src/utils.jl | 9 ++-- test/ast_tests/test_headings.jl | 4 +- test/code_analysis_tests/test_jet.jl | 14 +++--- 22 files changed, 108 insertions(+), 163 deletions(-) diff --git a/src/ast.jl b/src/ast.jl index 2fedba3..d049368 100644 --- a/src/ast.jl +++ b/src/ast.jl @@ -96,7 +96,7 @@ function heading_level(k::Kind) error("No matching Heading kind found.") end end -function heading_level(level::Int) +function heading_kind(level::Int) if level <= 1 K"Heading1" elseif level == 2 @@ -245,6 +245,6 @@ function nestable_level(k::Kind) end end -export is_first_class_node, heading_level, unordered_list_level, ordered_list_level, quote_level, nestable_level, litteral, NorgDocument, Node +export is_first_class_node, heading_kind, heading_level, unordered_list_level, ordered_list_level, quote_level, nestable_level, litteral, NorgDocument, Node end diff --git a/src/codegen/html.jl b/src/codegen/html.jl index 3725ee2..52754b2 100644 --- a/src/codegen/html.jl +++ b/src/codegen/html.jl @@ -64,7 +64,7 @@ function codegen(t::HTMLTarget, ast::NorgDocument) footnotes = getchildren(ast.root, K"Footnote") items = Iterators.flatten(children.(footnotes)) else # collect all orphan footnotes - footnotes = getchildren(ast.root, K"Footnote", AST.heading_level(t.footnotes_level)) + footnotes = getchildren(ast.root, K"Footnote", AST.heading_kind(Int(t.footnotes_level))) items = Iterators.flatten(children.(footnotes)) end footnotes_node = @htl """ diff --git a/src/codegen/json.jl b/src/codegen/json.jl index 14e6470..baaafc1 100644 --- a/src/codegen/json.jl +++ b/src/codegen/json.jl @@ -24,6 +24,11 @@ JSON target to feed [`codegen`](@ref). """ struct JSONTarget <: CodegenTarget end +""" +A special target for link location, this ensure type-stability. 
+""" +struct JSONLocationTarget <: CodegenTarget end + function codegen_children(t::JSONTarget, ast::AST.NorgDocument, node::Node) res = [] for c in children(node) @@ -166,7 +171,7 @@ function codegen(t::JSONTarget, ::Link, ast::NorgDocument, node::Node) elseif kind(first(node.children)) == K"TimestampLocation" text = textify(ast, first(node.children)) else - text = [OrderedDict(["t"=>"Str", "c"=>codegen(t, ast, first(node.children))])] + text = [OrderedDict(["t"=>"Str", "c"=>codegen(JSONLocationTarget(), ast, first(node.children))])] end if kind(first(node.children)) == K"TimestampLocation" OrderedDict([ @@ -174,7 +179,7 @@ function codegen(t::JSONTarget, ::Link, ast::NorgDocument, node::Node) "c"=>text ]) else - target = codegen(t, ast, first(node.children)) + target = codegen(JSONLocationTarget(), ast, first(node.children)) OrderedDict([ "t"=>"Link" "c"=>[ @@ -186,14 +191,14 @@ function codegen(t::JSONTarget, ::Link, ast::NorgDocument, node::Node) end end -codegen(::JSONTarget, ::URLLocation, ast, node) = textify(ast, node) +codegen(::JSONLocationTarget, ::URLLocation, ast, node) = textify(ast, node) -function codegen(::JSONTarget, ::LineNumberLocation, ast::NorgDocument, node::Node) +function codegen(::JSONLocationTarget, ::LineNumberLocation, ast::NorgDocument, node::Node) # Who are you, people who link to line location ? "#l-$(textify(ast, node))" end -function codegen(t::JSONTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) +function codegen(t::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) kindoftarget = kind(first(children(node))) title = textify(ast, last(children(node))) if AST.is_heading(kindoftarget) @@ -209,10 +214,10 @@ function codegen(t::JSONTarget, ::DetachedModifierLocation, ast::NorgDocument, n end end -function codegen(::JSONTarget, ::MagicLocation, ast::NorgDocument, node::Node) +function codegen(::JSONLocationTarget, ::MagicLocation, ast::NorgDocument, node::Node) key = textify(ast, node) if haskey(ast.targets, key) - kindoftarget, targetnoderef = ast.targets[key] + kindoftarget, targetnoderef = ast.targets[key]::Tuple{Kind, Ref{Node}} title = textify(ast, first(children(targetnoderef[]))) if AST.is_heading(kindoftarget) level_num = AST.heading_level(kindoftarget) @@ -230,7 +235,7 @@ function codegen(::JSONTarget, ::MagicLocation, ast::NorgDocument, node::Node) end end -function codegen(t::JSONTarget, ::FileLocation, ast::NorgDocument, node::Node) +function codegen(t::JSONLocationTarget, ::FileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" start = "/" @@ -247,7 +252,7 @@ function codegen(t::JSONTarget, ::FileLocation, ast::NorgDocument, node::Node) start * target_loc * subtarget_loc end -function codegen(t::JSONTarget, ::NorgFileLocation, ast::NorgDocument, node::Node) +function codegen(t::JSONLocationTarget, ::NorgFileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" start = "/" @@ -264,7 +269,7 @@ function codegen(t::JSONTarget, ::NorgFileLocation, ast::NorgDocument, node::Nod start * target_loc * subtarget_loc end -function codegen(t::JSONTarget, ::WikiLocation, ast::NorgDocument, node::Node) +function codegen(t::JSONLocationTarget, ::WikiLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) target_loc = textify(ast, target) if kind(subtarget) == K"None" @@ -275,7 +280,7 @@ function codegen(t::JSONTarget, ::WikiLocation, ast::NorgDocument, node::Node) "/" * 
target_loc * subtarget_loc end -codegen(::JSONTarget, ::TimestampLocation, ast::NorgDocument, node::Node) = textify(ast, node) +codegen(::JSONLocationTarget, ::TimestampLocation, ast::NorgDocument, node::Node) = textify(ast, node) codegen(t::JSONTarget, ::LinkDescription, ast::NorgDocument, node::Node) = collect(Iterators.flatten(codegen_children(t, ast, node))) @@ -284,7 +289,7 @@ function codegen(t::JSONTarget, ::Anchor, ast::NorgDocument, node::Node) if length(children(node)) == 1 target = "#" else - target = codegen(t, ast, last(children(node))) + target = codegen(JSONLocationTarget(), ast, last(children(node))) end OrderedDict([ "t"=>"Link" diff --git a/src/match/attached_modifiers.jl b/src/match/attached_modifiers.jl index b646d34..cf96ddb 100644 --- a/src/match/attached_modifiers.jl +++ b/src/match/attached_modifiers.jl @@ -32,12 +32,12 @@ freeformattachedmodifier(::InlineCode) = K"FreeFormInlineCode" freeformattachedmodifier(::NullModifier) = K"FreeFormNullModifier" freeformattachedmodifier(::InlineMath) = K"FreeFormInlineMath" freeformattachedmodifier(::Variable) = K"FreeFormVariable" +freeformattachedmodifier(t::T) where {T <: FreeFormAttachedModifier} = attachedmodifier(t) function match_norg(t::T, parents, tokens, i) where {T<:AttachedModifierStrategy} if K"LinkLocation" ∈ parents return MatchNotFound() end - @debug "matching attached modifier" next_i = nextind(tokens, i) next_token = tokens[next_i] prev_i = prevind(tokens, i) @@ -69,7 +69,6 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached if K"LinkLocation" ∈ parents return MatchNotFound() end - @debug "you know where" next_i = nextind(tokens, i) next_token = tokens[next_i] prev_i = prevind(tokens, i) @@ -77,7 +76,6 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached token = tokens[i] # Opening modifier if attachedmodifier(t) ∉ parents && (is_sof(last_token) || is_punctuation(last_token) || is_whitespace(last_token)) && (!is_eof(next_token) && !is_whitespace(next_token)) - @debug "going to open" last_token token next_token if kind(next_token) == K"|" # Edge case: we want to be able to write `|` (verbatim attached # modifiers have higher precedence than free-form attached modifiers) @@ -97,13 +95,11 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached end # Closing modifier elseif attachedmodifier(t) ∈ parents && t isa FreeFormAttachedModifier - @debug "closing free-form" t MatchClosing(attachedmodifier(t), first(parents)==attachedmodifier(t)) elseif attachedmodifier(t) ∈ parents && !is_whitespace(last_token) && (is_eof(next_token) || is_whitespace(next_token) || is_punctuation(next_token)) MatchClosing(attachedmodifier(t), first(parents) == attachedmodifier(t)) # Link modifier elseif !(t isa FreeFormAttachedModifier) && kind(last_token) == K":" && (!is_eof(next_token) && !is_whitespace(next_token)) - @debug "link modifier" prev_prev_i = prevind(tokens, prev_i) if prev_prev_i >= firstindex(tokens) && (is_sof(tokens[prev_prev_i]) || is_punctuation(tokens[prev_prev_i]) || is_whitespace(tokens[prev_prev_i])) MatchFound(attachedmodifier(t)) @@ -111,7 +107,6 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached MatchNotFound() end else - @debug "nah" MatchNotFound() end end diff --git a/src/match/detached_modifier_suffix.jl b/src/match/detached_modifier_suffix.jl index 9fc2691..2259b49 100644 --- a/src/match/detached_modifier_suffix.jl +++ b/src/match/detached_modifier_suffix.jl @@ -1,14 +1,11 @@ function 
match_norg(::DetachedModifierSuffix, parents, tokens, i) next_i = nextind(tokens, i) next_token = tokens[next_i] - @debug "detachedmodifier match" parents next_token tokens[next_i + 1] if first(parents) == K"NestableItem" && kind(next_token) == K"LineEnding" MatchFound(K"Slide") elseif first(parents) == K"NestableItem" && kind(next_token) == K":" next_token = tokens[nextind(tokens, next_i)] - @debug "maybe indent segment?" if kind(next_token) == K"LineEnding" - @debug "Indent segment" MatchFound(K"IndentSegment") else MatchNotFound() diff --git a/src/match/detached_modifiers.jl b/src/match/detached_modifiers.jl index fa30aa3..afa40c8 100644 --- a/src/match/detached_modifiers.jl +++ b/src/match/detached_modifiers.jl @@ -14,24 +14,23 @@ function match_norg(::Heading, parents, tokens, i) return MatchClosing(first(nestable_parents), false) end new_i = i - level = 0 + current_level = 0 while new_i < lastindex(tokens) && kind(tokens[new_i]) == K"*" new_i = nextind(tokens, new_i) - level += 1 + current_level += 1 end next_token = tokens[new_i] if kind(next_token) == K"Whitespace" # If we are in a standard ranged tag, the relevant parents are those # within the tag. ancestor_headings = filter(is_heading, relevant_parents) - higher_level_ancestor_heading = findfirst(x -> heading_level(x) >= level, ancestor_headings) - @debug "Closing heading ?" relevant_parents higher_level_ancestor_heading + higher_level_ancestor_heading = findfirst(≥(current_level)∘heading_level, ancestor_headings) if !isnothing(higher_level_ancestor_heading) MatchClosing(ancestor_headings[higher_level_ancestor_heading], false) elseif first(relevant_parents) ∈ [K"ParagraphSegment", K"Paragraph"] MatchClosing(first(relevant_parents), false) else - MatchFound(heading_level(level)) + MatchFound(heading_kind(current_level)) end else MatchNotFound() @@ -63,7 +62,6 @@ function match_norg(t::T, parents, tokens, i) where {T<:DelimitingModifier} new_token = tokens[new_i] end if is_delimiting - @debug "Found a delimiter" delimitingmodifier(t) parents if first(parents) ∈ KSet"NorgDocument IndentSegment StandardRangedTagBody" || is_heading(first(parents)) MatchFound(delimitingmodifier(t)) else @@ -77,46 +75,46 @@ function match_norg(t::T, parents, tokens, i) where {T<:DelimitingModifier} end end -function nestable(::Quote, level) - if level<=1 +function nestable(::Quote, l) + if l<=1 K"Quote1" - elseif level == 2 + elseif l == 2 K"Quote2" - elseif level == 3 + elseif l == 3 K"Quote3" - elseif level == 4 + elseif l == 4 K"Quote4" - elseif level == 5 + elseif l == 5 K"Quote5" else K"Quote6" end end -function nestable(::UnorderedList, level) - if level<=1 +function nestable(::UnorderedList, l) + if l<=1 K"UnorderedList1" - elseif level == 2 + elseif l == 2 K"UnorderedList2" - elseif level == 3 + elseif l == 3 K"UnorderedList3" - elseif level == 4 + elseif l == 4 K"UnorderedList4" - elseif level == 5 + elseif l == 5 K"UnorderedList5" else K"UnorderedList6" end end -function nestable(::OrderedList, level) - if level<=1 +function nestable(::OrderedList, l) + if l<=1 K"OrderedList1" - elseif level == 2 + elseif l == 2 K"OrderedList2" - elseif level == 3 + elseif l == 3 K"OrderedList3" - elseif level == 4 + elseif l == 4 K"OrderedList4" - elseif level == 5 + elseif l == 5 K"OrderedList5" else K"OrderedList6" @@ -124,27 +122,26 @@ function nestable(::OrderedList, level) end function match_norg(t::T, parents, tokens, i) where {T<:Nestable} new_i = i - level = 0 + current_level = 0 token = tokens[i] while new_i < lastindex(tokens) && 
kind(tokens[new_i]) == kind(token) new_i = nextind(tokens, new_i) - level += 1 + current_level += 1 end next_token = tokens[new_i] if kind(next_token) == K"Whitespace" ancestor_nestable = filter(is_nestable, parents) - higher_level_ancestor_id = findfirst(x->nestable_level(x) > level, ancestor_nestable) + higher_level_ancestor_id = findfirst(>(current_level)∘nestable_level, ancestor_nestable) if !isnothing(higher_level_ancestor_id) MatchClosing(ancestor_nestable[higher_level_ancestor_id], false) - elseif first(parents) == nestable(t, level) + elseif first(parents) == nestable(t, current_level) MatchFound(K"NestableItem") - elseif any(nestable_level.(ancestor_nestable) .== level) + elseif any(nestable_level.(ancestor_nestable) .== current_level) MatchClosing(first(parents), false) elseif first(parents) ∈ [K"Paragraph", K"ParagraphSegment"] - @debug "Chérie ça va couper." parents tokens[i] MatchClosing(first(parents), false) else - MatchFound(nestable(t, level)) + MatchFound(nestable(t, current_level)) end else MatchNotFound() diff --git a/src/match/match.jl b/src/match/match.jl index 9d311e4..078599c 100644 --- a/src/match/match.jl +++ b/src/match/match.jl @@ -90,7 +90,6 @@ function force_word_context(parents, tokens, i) end function match_norg(parents, tokens, i) - @debug "Matching..." tokens[i] parents token = tokens[i] m = if force_word_context(parents, tokens, i) match_norg(Word(), parents, tokens, i) @@ -201,7 +200,6 @@ function match_norg(::Whitespace, parents, tokens, i) elseif kind(next_token) == K"$" match_norg(Definition(), parents, tokens, nextind(tokens, i)) elseif kind(next_token) == K"^" - @debug "haha footnote" match_norg(Footnote(), parents, tokens, nextind(tokens, i)) else MatchNotFound() @@ -216,7 +214,6 @@ function match_norg(::LineEnding, parents, tokens, i) if first(parents) == K"NorgDocument" MatchContinue() elseif is_line_ending(prev_token) - @debug "lineEnding" tokens[i] prev_token parents nestable_parents = filter(is_nestable, parents[2:end]) attached_parents = filter(is_attached_modifier, parents) if first(parents) ∈ KSet"IndentSegment StandardRangedTagBody" @@ -294,19 +291,15 @@ end function match_norg(::Minus, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] if is_sof(prev_token) || is_line_ending(prev_token) - possible_node = [ - WeakDelimiter(), - UnorderedList(), - Strikethrough(), - ] - m = MatchNotFound() - for node in possible_node - m = match_norg(node, parents, tokens, i) - if !isnotfound(m) - break - end + m = match_norg(WeakDelimiter(), parents, tokens, i) + if isfound(m) + return m end - m + m = match_norg(UnorderedList(), parents, tokens, i) + if isfound(m) + return m + end + match_norg(Strikethrough(), parents, tokens, i) else match_norg(Strikethrough(), parents, tokens, i) end @@ -315,7 +308,6 @@ end match_norg(::ExclamationMark, parents, tokens, i) = match_norg(Spoiler(), parents, tokens, i) function match_norg(::Circumflex, parents, tokens, i) - @debug "bonjour c'est circumflex" tokens[i] prev_token = tokens[prevind(tokens, i)] m = if is_line_ending(prev_token) || is_sof(prev_token) match_norg(Footnote(), parents, tokens, i) @@ -346,7 +338,6 @@ function match_norg(::Colon, parents, tokens, i) next_token = tokens[next_i] prev_i = prevind(tokens, i) prev_token = tokens[prev_i] - @debug "hey there" kind(prev_token)∈ATTACHED_DELIMITERS prev_token if kind(next_token) ∈ ATTACHED_DELIMITERS m = match_norg(parents, tokens, next_i) if isfound(m) && AST.is_attached_modifier(kind(matched(m))) @@ -441,9 +432,7 @@ tag_to_strategy(::NumberSign) = 
StrongCarryoverTag() function match_norg(t::Union{CommercialAtSign, Plus, NumberSign}, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] - @debug "Matching" t prev_token if is_sof(prev_token) || is_line_ending(prev_token) - @debug "Prev token is ok" tokens[i] prev_token match_norg(tag_to_strategy(t), parents, tokens, i) elseif is_whitespace(prev_token) prev_i = prevind(tokens, i) @@ -459,7 +448,6 @@ function match_norg(t::Union{CommercialAtSign, Plus, NumberSign}, parents, token end function match_norg(::DollarSign, parents, tokens, i) - @debug "bonjour c'est dollarsign" prev_token = tokens[prevind(tokens, i)] m = if is_line_ending(prev_token) || is_sof(prev_token) match_norg(Definition(), parents, tokens, i) @@ -476,7 +464,6 @@ end function match_norg(::VerticalBar, parents, tokens, i) next_token = tokens[nextind(tokens, i)] prev_token = tokens[prevind(tokens, i)] - @debug "vertical" tokens[i] prev_token next_token if is_sof(prev_token) || is_line_ending(prev_token) match_norg(StandardRangedTag(), parents, tokens, i) elseif kind(next_token) == K"*" diff --git a/src/match/rangeable_detached_modifier.jl b/src/match/rangeable_detached_modifier.jl index e7e808b..ff9f938 100644 --- a/src/match/rangeable_detached_modifier.jl +++ b/src/match/rangeable_detached_modifier.jl @@ -5,7 +5,6 @@ rangeable_from_strategy(::Footnote) = K"Footnote" function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifier} token = tokens[i] - @debug "okay, matching rangeable" token if kind(token) != rangeable_from_token(t) return MatchNotFound() end @@ -14,7 +13,6 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie next_i = nextind(tokens, i) next_token = tokens[next_i] if kind(token) == K"Whitespace" - @debug "haha, whitespace" parents if first(parents) == K"Slide" MatchFound(rangeable_from_strategy(t)) elseif (K"NestableItem" ∈ parents || AST.is_nestable(first(parents))) && K"Slide" ∉ parents @@ -41,7 +39,6 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie end end elseif kind(token) == rangeable_from_token(t) && kind(next_token) == K"LineEnding" && rangeable_from_strategy(t) ∈ parents - @debug "match ending ranged" nextline_i = consume_until(K"LineEnding", tokens, i) token = tokens[nextline_i] nextline_start_i = if kind(token) == K"Whitespace" @@ -50,20 +47,14 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie nextline_i end token = tokens[nextline_start_i] - @debug "next line starts with" token if kind(token) == rangeable_from_token(t) - @debug "start matching the next line" m = match_norg(t, parents, tokens, nextline_start_i) - @debug "stop matching the next line" - @debug "it matches a" m first(parents) rangeable_from_strategy(t) if isfound(m) && matched(m)==rangeable_from_strategy(t) - @debug "Let's close the current RangeableItem" MatchClosing(first(parents), true) else MatchClosing(first(parents), rangeable_from_strategy(t)==first(parents)) end else - @debug "so we close first parent" first(parents) rangeable_from_strategy(t) MatchClosing(first(parents), rangeable_from_strategy(t)==first(parents)) end else diff --git a/src/match/tags.jl b/src/match/tags.jl index 1001487..ce90162 100644 --- a/src/match/tags.jl +++ b/src/match/tags.jl @@ -7,12 +7,10 @@ body(::StandardRangedTag) = K"StandardRangedTagBody" function match_norg(t::T, parents, tokens, i) where {T <: Tag} i = nextind(tokens, i) token = tokens[i] - @debug "tag match" parents tokens[i] if kind(token) == K"Word" val = 
Tokens.value(token) if tag(t) ∈ parents && val == "end" next_token = tokens[nextind(tokens, i)] - @debug "encountered end" token next_token if kind(next_token) ∈ KSet"LineEnding EndOfFile" MatchClosing(tag(t), first(parents) ∈ (tag(t), body(t))) else @@ -34,7 +32,6 @@ end function match_norg(::WeakCarryoverTag, parents, tokens, i) token = tokens[nextind(tokens, i)] - @debug "Matching weak carryover tag" if kind(token) == K"Word" nextline = consume_until(K"LineEnding", tokens, i) m = match_norg(parents, tokens, nextline) @@ -50,7 +47,6 @@ end function match_norg(::StrongCarryoverTag, parents, tokens, i) token = tokens[nextind(tokens, i)] - @debug "Matching strong carryover tag" relevant_parents = if K"StandardRangedTag" ∈ parents k = findfirst(parents .== Ref(K"StandardRangedTag"))::Int parents[1:k] diff --git a/src/parser/attachedmodifier.jl b/src/parser/attachedmodifier.jl index c00e321..90b42fa 100644 --- a/src/parser/attachedmodifier.jl +++ b/src/parser/attachedmodifier.jl @@ -33,7 +33,6 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where m = Match.MatchClosing(node_kind) while !is_eof(tokens[i]) m = match_norg([node_kind, parents...], tokens, i) - @debug "attached modifier loop" m if isclosing(m) if consume(m) && consumepost(t) >= 2 for _ in 1:(consumepost(t)-1) @@ -50,7 +49,6 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where push!(children, segment) end end - @debug "hey it's me" m tokens[i] if is_eof(tokens[i]) || (isclosing(m) && matched(m) == K"None") || # Special case for inline code precedence. (isclosing(m) && matched(m) != node_kind && matched(m) ∈ parents) # we've been tricked in thincking we were in a modifier. diff --git a/src/parser/detachedmodifierextensions.jl b/src/parser/detachedmodifierextensions.jl index 1b15731..42e6596 100644 --- a/src/parser/detachedmodifierextensions.jl +++ b/src/parser/detachedmodifierextensions.jl @@ -15,7 +15,7 @@ function parse_norg(::DetachedModifierExtension, parents::Vector{Kind}, tokens:: elseif extension == K"StartDateExtension" parse_norg(StartDateExtension(), parents, tokens, i) else - error("Unhandled detached modifier extension. Token $token.") + error("Unhandled detached modifier extension. Token $(tokens[i]).") end end function parse_norg(::TodoExtension, parents::Vector{Kind}, tokens::Vector{Token}, i) diff --git a/src/parser/detachedmodifiersuffix.jl b/src/parser/detachedmodifiersuffix.jl index 36cb9f2..4e91b58 100644 --- a/src/parser/detachedmodifiersuffix.jl +++ b/src/parser/detachedmodifiersuffix.jl @@ -3,23 +3,22 @@ function parse_norg(::Slide, parents::Vector{Kind}, tokens::Vector{Token}, i) i = consume_until(K"LineEnding", tokens, i) p = [K"Slide", parents...] m = match_norg(p, tokens, i) - @debug "ok fréro j'ai ça." 
tokens[i] m - child = if isfound(m) + children = if isfound(m) if matched(m) == K"Definition" - parse_norg(Definition(), p, tokens, i) + [parse_norg(Definition(), p, tokens, i)] elseif matched(m) == K"Footnote" - parse_norg(Footnote(), p, tokens, i) + [parse_norg(Footnote(), p, tokens, i)] elseif matched(m) == K"Verbatim" - parse_norg(Verbatim(), p, tokens, i) + [parse_norg(Verbatim(), p, tokens, i)] elseif matched(m) == K"StandardRangedTag" - parse_norg(StandardRangedTag(), p, tokens, i) + [parse_norg(StandardRangedTag(), p, tokens, i)] else - parse_norg(Paragraph(), p, tokens, i) + [parse_norg(Paragraph(), p, tokens, i)] end else - parse_norg(parents, tokens, i) + AST.Node[] end - AST.Node(K"Slide", [child], start, AST.stop(child)) + AST.Node(K"Slide", children, start, AST.stop(last(children))) end function parse_norg(::IndentSegment, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -31,7 +30,6 @@ function parse_norg(::IndentSegment, parents::Vector{Kind}, tokens::Vector{Token while !is_eof(tokens[i]) m = match_norg(p, tokens, i) - @debug "indent segment loop" m tokens[i] if isclosing(m) break elseif iscontinue(m) diff --git a/src/parser/link.jl b/src/parser/link.jl index 010f6e2..61c5769 100644 --- a/src/parser/link.jl +++ b/src/parser/link.jl @@ -1,3 +1,6 @@ +limit_tokens(tokens, stop) = [tokens[begin:stop]...; EOFToken()]::Vector{Token} + + function parse_norg(::Link, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i i = nextind(tokens, i) @@ -77,8 +80,8 @@ function parse_norg(::URLLocation, parents::Vector{Kind}, tokens::Vector{Token}, i = prevind(tokens, i) end if isclosing(m) && matched(m) != K"URLLocation" && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) - AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) + AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) else stop = i i = prevind(tokens, i) @@ -99,9 +102,9 @@ function parse_norg(::LineNumberLocation, parents::Vector{Kind}, tokens::Vector{ i = prevind(tokens, i) end if isclosing(m) && matched(m) != K"LineNumberLocation" && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) - AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) else stop = i i = prevind(tokens, i) @@ -133,7 +136,7 @@ function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::V K"Heading4" elseif level == 5 K"Heading5" - elseif level >= 6 + else # level >= 6 K"Heading6" end elseif kind(token) == K"$" @@ -156,13 +159,13 @@ function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::V if !consume(m) || is_eof(token) i = prevind(tokens, i) end - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start_heading_title) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start_heading_title) if kind(token) == K"}" children = AST.Node[] for (i,c) in enumerate(p.children) append!(children, c.children) if i < lastindex(p.children) - push!(children, AST.Node(K"WordNode", [], c.stop, c.stop)) + push!(children, AST.Node(K"WordNode", AST.Node[], c.stop, c.stop)) end end content = AST.Node(K"ParagraphSegment", children, p.start, p.stop) @@ -194,7 +197,7 @@ function parse_norg(::MagicLocation, parents::Vector{Kind}, 
tokens::Vector{Token if !consume(m) || is_eof(token) i = prevind(tokens, i) end - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start_heading_title) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start_heading_title) if kind(token) == K"}" children = AST.Node[] for (i,c) in enumerate(p.children) @@ -241,9 +244,9 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i,) wher i = prevind(tokens, i) end if isclosing(m) && matched(m) != filelocationkind(t) && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) - return AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + return AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) end if use_neorg_root k = K"FileNorgRootTarget" @@ -267,8 +270,8 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i,) wher i = prevind(tokens, i) end if isclosing(m) && matched(m) != filelocationkind(t) && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) - return AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) + return AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) end else i = AST.stop(subtarget) @@ -296,7 +299,7 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} if !consume(m) || is_eof(token) i = prevind(tokens, i) end - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start_heading_title) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start_heading_title) subtarget = AST.Node(K"None") content = AST.Node(K"None") if kind(token) ∈ KSet"} :" @@ -328,8 +331,8 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} i = prevind(tokens, i) end if isclosing(m) && matched(m) != K"WikiLocation" && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) - return AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) + return AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) end else i = AST.stop(subtarget) @@ -358,8 +361,8 @@ function parse_norg(::TimestampLocation, parents::Vector{Kind}, tokens::Vector{T i = prevind(tokens, i) end if isclosing(m) && matched(m) != K"TimestampLocation" && kind(token) != K"}" - p = parse_norg(Paragraph(), parents, [tokens[begin:i]...; EOFToken()], start) - AST.Node(K"None", vcat(getproperty.(p.children, :children)...), start, i) + p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) + AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) else stop = i i = prevind(tokens, i) diff --git a/src/parser/nestablemodifier.jl b/src/parser/nestablemodifier.jl index 48d2722..ef0d42b 100644 --- a/src/parser/nestablemodifier.jl +++ b/src/parser/nestablemodifier.jl @@ -14,7 +14,6 @@ function parse_norg(::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where end break end - @debug "nestable loop" m tokens[i] child = if kind(matched(m)) == K"WeakCarryoverTag" parse_norg(WeakCarryoverTag(), [nestable_kind, parents...], tokens, i) else @@ -50,7 +49,6 @@ function parse_norg(::NestableItem, parents::Vector{Kind}, tokens::Vector{Token} end while !is_eof(tokens[i]) 
m = match_norg([K"NestableItem", parents...], tokens, i) - @debug "nestable item loop" m tokens[i] if isclosing(m) if !consume(m) i = prevind(tokens, i) diff --git a/src/parser/parser.jl b/src/parser/parser.jl index 741aeb0..d2f5b9f 100644 --- a/src/parser/parser.jl +++ b/src/parser/parser.jl @@ -81,7 +81,6 @@ function parse_norg(tokens::Vector{Token}) children = AST.Node[] while !is_eof(tokens[i]) child = parse_norg_toplevel_one_step([K"NorgDocument"], tokens, i) - @debug "toplevel" i child tokens[i] i = AST.stop(child) if !is_eof(tokens[i]) i = nextind(tokens, i) @@ -102,7 +101,6 @@ function parse_norg(::Paragraph, parents::Vector{Kind}, tokens::Vector{Token}, i start = i while !is_eof(tokens[i]) m = match_norg([K"Paragraph", parents...], tokens, i) - @debug "paragraph loop" m tokens[i] if isclosing(m) break elseif iscontinue(m) @@ -207,10 +205,9 @@ function parse_norg(::ParagraphSegment, parents::Vector{Kind}, tokens::Vector{To children = AST.Node[] m = Match.MatchClosing(K"ParagraphSegment") parents = [K"ParagraphSegment", parents...] - siblings = [] + siblings = AST.Node[] while !is_eof(tokens[i]) m = match_norg(parents, tokens, i) - @debug "ps loop" m if isclosing(m) break elseif iscontinue(m) diff --git a/src/parser/rangeabledetachedmodifier.jl b/src/parser/rangeabledetachedmodifier.jl index 210ffcf..c1f9a6a 100644 --- a/src/parser/rangeabledetachedmodifier.jl +++ b/src/parser/rangeabledetachedmodifier.jl @@ -3,17 +3,14 @@ strategy_to_kind(::Footnote) = K"Footnote" function parse_norg(t::RangeableDetachedModifier, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i parents = [strategy_to_kind(t), parents...] - children = [] + children = AST.Node[] while !is_eof(tokens[i]) - @debug "Ranged mainloop" tokens[i] m = match_norg(parents, tokens, i) - @debug "Ranged matched" m if isclosing(m) if !consume(m) i = prevind(tokens, i) else stop = prevind(tokens, consume_until(K"LineEnding", tokens, i)) - @debug "Consuming until" tokens[stop] if !isempty(children) child = last(children) children[end] = AST.Node(K"RangeableItem", child.children, AST.start(child), stop) @@ -22,7 +19,6 @@ function parse_norg(t::RangeableDetachedModifier, parents::Vector{Kind}, tokens: end break elseif matched(m) ∉ KSet"WeakCarryoverTag RangeableItem" - @debug "Hugo, I'm leaving on" tokens[i] i = prevind(tokens, i) break end @@ -60,7 +56,6 @@ function parse_norg(::RangeableItem, parents::Vector{Kind}, tokens::Vector{Token end function parse_norg_unranged_rangeable(parents, tokens, i) - @debug "unranged rangeable" parents tokens[i] title_segment = parse_norg(ParagraphSegment(), parents, tokens, i) paragraph = parse_norg(Paragraph(), parents, tokens, nextind(tokens, AST.stop(title_segment))) @@ -68,7 +63,6 @@ function parse_norg_unranged_rangeable(parents, tokens, i) end function parse_norg_ranged_rangeable(parents, tokens, i) - @debug "ranged rangeable" parents tokens[i] start = i title_segment = parse_norg(ParagraphSegment(), parents, tokens, i) children = [] @@ -76,13 +70,10 @@ function parse_norg_ranged_rangeable(parents, tokens, i) token = tokens[i] while !is_eof(token) m = match_norg(parents, tokens, i) - @debug "ranged item loop" token m if isclosing(m) - @debug "ok, closing ranged item" m tokens[i] if consume(m) i = consume_until(K"LineEnding", tokens, i) i = prevind(tokens, i) - @debug "consuming until" i tokens[i] else i = prevind(tokens, i) end @@ -114,5 +105,5 @@ function parse_norg_ranged_rangeable(parents, tokens, i) token = tokens[i] end end - AST.Node(K"RangeableItem", [title_segment, 
children...], start, i) + AST.Node(K"RangeableItem", [title_segment, children...]::Vector{AST.Node}, start, i) end diff --git a/src/parser/structuralmodifier.jl b/src/parser/structuralmodifier.jl index 740ce41..41c1722 100644 --- a/src/parser/structuralmodifier.jl +++ b/src/parser/structuralmodifier.jl @@ -12,7 +12,7 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) token = tokens[i] heading_level += 1 end - heading_kind = AST.heading_level(heading_level) + heading_kind = AST.heading_kind(heading_level) if is_whitespace(token) i = nextind(tokens, i) m = match_norg([heading_kind, parents...], tokens, i) @@ -30,7 +30,6 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) i = nextind(tokens, AST.stop(title_segment)) while !is_eof(tokens[i]) m = match_norg([heading_kind, parents...], tokens, i) - @debug "heading loop" m tokens[i] if isclosing(m) break end diff --git a/src/parser/tag.jl b/src/parser/tag.jl index 542840e..59f1240 100644 --- a/src/parser/tag.jl +++ b/src/parser/tag.jl @@ -21,7 +21,6 @@ function parse_tag_header(parents::Vector{Kind}, tokens::Vector{Token}, i) token = tokens[i] end end - @debug "coucou" token if kind(token) == K"Whitespace" i = nextind(tokens, i) token = tokens[i] @@ -65,17 +64,13 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where stop_content = i p = [body(t), tag(t), parents...] body_children = AST.Node[] - @debug "tag parsing" start start_content tokens[i] while !is_eof(tokens[i]) m = match_norg(p, tokens, i) - @debug "tag loop" m tokens[i] if isclosing(m) - @debug "Closing tag" m tokens[i] stop_content = prevind(tokens, i) if kind(tokens[i]) == K"LineEnding" i = nextind(tokens, i) end - @debug "after advancing" tokens[i] i = consume_until(K"LineEnding", tokens, i) if tokens[i] != K"EndOfFile" i = prevind(tokens, i) @@ -90,16 +85,13 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where i = nextind(tokens, AST.stop(c)) end push!(children, AST.Node(body(t), body_children, start_content, stop_content)) - @debug "Closed tag" i tokens[i] parents AST.Node(tag(t), children, start, i) end function parse_norg(::WeakCarryoverTag, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i children, i = parse_tag_header(parents, tokens, i) - @debug "Weak carryover tag here" tokens[i] - content = parse_norg_toplevel_one_step([parents...], tokens, i) - @debug "hey there" content parents + content = parse_norg_toplevel_one_step(parents, tokens, i) if kind(content) == K"Paragraph" || is_nestable(kind(content)) content_children = content.children first_segment = first(content_children) @@ -113,13 +105,11 @@ end function parse_norg(::StrongCarryoverTag, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i children, i = parse_tag_header(parents, tokens, i) - @debug "Strong carryover tag here" tokens[i] m = match_norg(parents, tokens, i) if isclosing(m) AST.Node(K"StrongCarryoverTag", children, start, prevind(tokens, i)) else - content = parse_norg_toplevel_one_step([parents...], tokens, i) - @debug "hey there" content parents + content = parse_norg_toplevel_one_step(parents, tokens, i) AST.Node(K"StrongCarryoverTag", [children..., content], start, AST.stop(content)) end end diff --git a/src/tokens.jl b/src/tokens.jl index a953e0b..8b97f18 100644 --- a/src/tokens.jl +++ b/src/tokens.jl @@ -49,7 +49,7 @@ function Token(kind, line, char, value) end function Base.show(io::IO, token::Token) print(io, - "$(kind(token)): $(repr(value(token))), line 
$(line(token)) col. $(char(token))") + "Token(K\"$(convert(String, kind(token)))\", \"$(value(token))\", line $(string(line(token))), col. $(string(char(token))))") end SOFToken() = Token(K"StartOfFile", 0, 0, SubString("")) EOFToken() = Token(K"EndOfFile", 0, 0, SubString("")) diff --git a/src/utils.jl b/src/utils.jl index b751a95..a3bf439 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -96,9 +96,12 @@ attribute of the AST first. """ function findtargets!(ast::NorgDocument) empty!(ast.targets) - for c in children(ast.root) - map(PreOrderDFS(x->kind(x) ∉ KSet"Link Anchor", c)) do n - findtargets!(ast, n) + stack = copy(children(ast.root)) + while !isempty(stack) + c = pop!(stack) + findtargets!(ast, c) + if kind(c) ∉ KSet"Link Anchor" + append!(stack, children(c)) end end end diff --git a/test/ast_tests/test_headings.jl b/test/ast_tests/test_headings.jl index 18b9ea7..c38e0b7 100644 --- a/test/ast_tests/test_headings.jl +++ b/test/ast_tests/test_headings.jl @@ -49,7 +49,7 @@ And here is some more text that has broken out of the matrix. h1 = children(ast.root)[2] p = last(children(ast.root)) - @test kind(hi) == AST.heading_level(i) + @test kind(hi) == AST.heading_kind(i) @test kind(h1) == K"Heading1" @test kind(p) == K"Paragraph" @@ -59,7 +59,7 @@ And here is some more text that has broken out of the matrix. hj = children(h1)[2] for j in 2:i - @test kind(hj) == AST.heading_level(j) + @test kind(hj) == AST.heading_kind(j) hj_title = first(children(hj)) @test kind(hj_title) == K"ParagraphSegment" @test length(children(hj_title)) == 9 diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index 5e86914..ffecb4f 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -1,4 +1,4 @@ -using JET +using JET, AbstractTrees, OrderedCollections @testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin payload = open(Norg.NORG_SPEC_PATH, "r") do f @@ -8,19 +8,19 @@ end # Error analysis # Parse the entire spec -@test_call mode=:sound norg(payload) +@test_call ignored_modules=(AbstractTrees, Base) norg(payload) ast = norg(payload) # HTML codegen -@test_call mode=:sound Norg.codegen(HTMLTarget(), ast) +@test_call ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) # JSON codegen -@test_call mode=:sound Norg.codegen(JSONTarget(), ast) +@test_call ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) # Optimization analysis # Parsing -@test_opt norg(payload) +@test_opt ignored_modules=(AbstractTrees, Base) norg(payload) # Codegen -@test_opt Norg.codegen(HTMLTarget(), payload) -@test_opt Norg.codegen(JSONTarget(), payload) +@test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) +@test_opt ignored_modules=(AbstractTrees, OrderedCollections, Base) Norg.codegen(JSONTarget(), ast) end From dd29680e9c453409c3f833f78ee71176a4d03547 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Sun, 17 Sep 2023 20:15:13 +0200 Subject: [PATCH 3/9] feat: First pass on JET.jl tests. --- src/codegen/json.jl | 404 +++++++++++++++------------ src/parser/link.jl | 6 +- src/utils.jl | 10 +- test/code_analysis_tests/test_jet.jl | 4 +- test/codegen_tests/json.jl | 56 ++-- 5 files changed, 264 insertions(+), 216 deletions(-) diff --git a/src/codegen/json.jl b/src/codegen/json.jl index baaafc1..04d9e11 100644 --- a/src/codegen/json.jl +++ b/src/codegen/json.jl @@ -7,7 +7,6 @@ follow the Pandoc JSON AST API. 
You can then export using *e.g.* [JSON.jl](https """ module JSONCodegen using Base: CacheHeaderIncludes -using OrderedCollections using AbstractTrees using ..AST @@ -19,18 +18,34 @@ import ..codegen import ..textify import ..idify +jsonify(p::Pair{Symbol, Symbol}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(p::Pair{Symbol, Int}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(p::Pair{Symbol, String}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(a::Vector{Pair{Symbol, String}}) = "{"*join(jsonify.(a)::Vector{String}, ",")*"}" +jsonify(a::Vector) = "["*join(jsonify.(a), ",")*"]" +jsonify(x::String) = x +jsonify(x::Int) = string(x) +jsonify(x::Symbol) = "\"$(x)\"" + """ JSON target to feed [`codegen`](@ref). + +You can specify a pandoc api version, but this only changes the version number +announced in the generated output. """ -struct JSONTarget <: CodegenTarget end +struct JSONTarget <: CodegenTarget + pandocapiversion::Vector{Int} +end +JSONTarget() = JSONTarget([1, 23]) """ A special target for link location, this ensure type-stability. """ struct JSONLocationTarget <: CodegenTarget end +codegen(::JSONLocationTarget, _, _, _) = error("Trying to generate a non location node with target `JSONLocationTarget`. You found a bug in JSON code generation.") function codegen_children(t::JSONTarget, ast::AST.NorgDocument, node::Node) - res = [] + res = String[] for c in children(node) r = codegen(t, ast, c) if !isempty(r) @@ -41,120 +56,116 @@ function codegen_children(t::JSONTarget, ast::AST.NorgDocument, node::Node) end function codegen(t::JSONTarget, ast::AST.NorgDocument) - OrderedDict([ - "pandoc-api-version" => [1, 23] - "meta" => OrderedDict{String, String}() - "blocks" => codegen_children(t, ast, ast.root) + jsonify([ + Symbol("pandoc-api-version") => jsonify(t.pandocapiversion) + :meta => "{}" + :blocks => jsonify(codegen_children(t, ast, ast.root)) ]) end function codegen(t::JSONTarget, ::Paragraph, ast::NorgDocument, node::Node) - res = [] + res = String[] for c in children(node) r = codegen(t, ast, c) if !isempty(r) - if r isa Vector - append!(res, r) - else - push!(res, r) - end - push!(res, OrderedDict{String, Any}("t" => "SoftBreak")) + push!(res, r) + push!(res, jsonify([:t => jsonify(:SoftBreak)])) end end if !isempty(res) pop!(res) # remove last softbreak end - OrderedDict([ - "t" => "Para" - "c" => res + jsonify([ + :t => jsonify(:Para) + :c => jsonify(res) ]) end -codegen(t::JSONTarget, ::ParagraphSegment, ast::NorgDocument, node::Node) = codegen_children(t, ast, node) +codegen(t::JSONTarget, ::ParagraphSegment, ast::NorgDocument, node::Node) = join(codegen_children(t, ast, node), ",") -pandoc_t(::Union{FreeFormBold, Bold}) = "Strong" -pandoc_t(::Union{FreeFormItalic, Italic}) = "Emph" -pandoc_t(::Union{FreeFormUnderline, Underline}) = "Underline" -pandoc_t(::Union{FreeFormStrikethrough, Strikethrough}) = "Strikeout" -pandoc_t(::Union{FreeFormSpoiler, Spoiler}) = "Span" -pandoc_t(::Union{FreeFormSuperscript, Superscript}) = "Superscript" -pandoc_t(::Union{FreeFormSubscript, Subscript}) = "Subscript" -pandoc_t(::Union{FreeFormInlineCode, InlineCode}) = "Code" +pandoc_t(::Union{FreeFormBold, Bold}) = :Strong +pandoc_t(::Union{FreeFormItalic, Italic}) = :Emph +pandoc_t(::Union{FreeFormUnderline, Underline}) = :Underline +pandoc_t(::Union{FreeFormStrikethrough, Strikethrough}) = :Strikeout +pandoc_t(::Union{FreeFormSpoiler, Spoiler}) = :Span +pandoc_t(::Union{FreeFormSuperscript, Superscript}) = :Superscript +pandoc_t(::Union{FreeFormSubscript, Subscript}) = 
:Subscript +pandoc_t(::Union{FreeFormInlineCode, InlineCode}) = :Code pandoc_attr(::Union{FreeFormBold, Bold}) = [] pandoc_attr(::Union{FreeFormItalic, Italic}) = [] pandoc_attr(::Union{FreeFormUnderline, Underline}) = [] pandoc_attr(::Union{FreeFormStrikethrough, Strikethrough}) = [] -pandoc_attr(::Union{FreeFormSpoiler, Spoiler}) = ["", ["spoiler"], []] +pandoc_attr(::Union{FreeFormSpoiler, Spoiler}) = ["\"\"", jsonify(["\"spoiler\""]), jsonify([])] pandoc_attr(::Union{FreeFormSuperscript, Superscript}) = [] pandoc_attr(::Union{FreeFormSubscript, Subscript}) = [] -pandoc_attr(::Union{FreeFormInlineCode, InlineCode}) = ["", [], []] +pandoc_attr(::Union{FreeFormInlineCode, InlineCode}) = ["\"\"", jsonify([]), jsonify([])] function codegen(t::JSONTarget, s::T, ast::NorgDocument, node::Node) where {T<:AttachedModifierStrategy} - res = [] + res = String[] for c in children(node) r = codegen(t, ast, c) if !isempty(r) - append!(res, r) - push!(res, OrderedDict{String, Any}("t" => "SoftBreak")) + push!(res, r) + push!(res, jsonify([:t => jsonify(:SoftBreak)])) end end attr = pandoc_attr(s) if isempty(attr) - OrderedDict([ - "t"=>pandoc_t(s) - "c" => res + jsonify([ + :t => jsonify(pandoc_t(s)) + :c => jsonify(res) ]) else - OrderedDict([ - "t"=>pandoc_t(s) - "c" => [attr, res] + jsonify([ + :t => jsonify(pandoc_t(s)) + :c => jsonify([jsonify(attr), jsonify(res)]) ]) end end function codegen(::JSONTarget, ::Union{InlineMath, FreeFormInlineMath}, ast::NorgDocument, node::Node) - OrderedDict([ - "t"=>"Math" - "c" => [OrderedDict(["t"=>"InlineMath"]), textify(ast, node)] + jsonify([ + :t=>jsonify(:Math) + :c => jsonify([jsonify([:t=>jsonify(:InlineMath)]), "\""*textify(ast, node, escape_string)*"\""]) ]) end function codegen(::JSONTarget, ::Union{Variable, FreeFormVariable}, ::NorgDocument, ::Node) - [] + "" end function codegen(::JSONTarget, ::Union{NullModifier, FreeFormNullModifier}, ::NorgDocument, ::Node) - [] + "" end -function codegen(t::JSONTarget, s::Union{InlineCode, FreeFormInlineCode}, ast::NorgDocument, node::Node) - OrderedDict([ - "t"=>pandoc_t(s) - "c" => [pandoc_attr(s), textify(ast, node)] +function codegen(::JSONTarget, s::Union{InlineCode, FreeFormInlineCode}, ast::NorgDocument, node::Node) + jsonify([ + :t=> jsonify(pandoc_t(s)) + :c => jsonify([jsonify(pandoc_attr(s)), "\""*textify(ast, node, escape_string)*"\""]) ]) end function codegen(t::JSONTarget, ::Word, ast::NorgDocument, node::Node) if is_leaf(node) && (AST.stop(node) - AST.start(node) > 0) - OrderedDict([ - "t"=>"Str" - "c"=>AST.litteral(ast, node) + jsonify([ + :t=>jsonify(:Str) + :c=>"\"$(textify(ast, node, escape_string))\"" ]) elseif is_leaf(node) token = first(ast.tokens[AST.start(node):AST.stop(node)]) if Tokens.is_whitespace(token) - OrderedDict([ - "t"=>"Space" + jsonify([ + :t=>jsonify(:Space) ]) else - OrderedDict([ - "t"=>"Str" - "c"=>AST.litteral(ast, node) + jsonify([ + :t=>jsonify(:Str) + :c=>"\"$(textify(ast, node, escape_string))\"" ]) end else - [codegen(t, Word(), ast, c) for c in children(node)] + jsonify([codegen(t, Word(), ast, c) for c in children(node)]) end end codegen(t::JSONTarget, ::Escape, ast, node) = codegen(t, ast, first(children(node))) @@ -163,44 +174,50 @@ function codegen(t::JSONTarget, ::Link, ast::NorgDocument, node::Node) if length(node.children) > 1 text = codegen(t, ast, last(node.children)) elseif kind(first(node.children)) == K"DetachedModifierLocation" - text = codegen(t, ast, children(first(children(node)))[2]) + text = jsonify([codegen(t, ast, 
children(first(children(node)))[2])]) elseif kind(first(node.children)) == K"MagicLocation" - text = codegen(t, ast, children(first(children(node)))[1]) + text = jsonify([codegen(t, ast, children(first(children(node)))[1])]) elseif kind(first(node.children)) == K"WikiLocation" - text = codegen(t, ast, children(first(children(node)))[1]) + text = jsonify([codegen(t, ast, children(first(children(node)))[1])]) elseif kind(first(node.children)) == K"TimestampLocation" - text = textify(ast, first(node.children)) + text = "\""*textify(ast, first(node.children), escape_string)*"\"" else - text = [OrderedDict(["t"=>"Str", "c"=>codegen(JSONLocationTarget(), ast, first(node.children))])] + text = jsonify([jsonify([:t=>jsonify(:Str), :c=>"\""*codegen(JSONLocationTarget(), ast, first(node.children))*"\""])]) end if kind(first(node.children)) == K"TimestampLocation" - OrderedDict([ - "t"=>"Str" - "c"=>text + jsonify([ + :t=>jsonify(:Str) + :c=>text ]) else target = codegen(JSONLocationTarget(), ast, first(node.children)) - OrderedDict([ - "t"=>"Link" - "c"=>[ - ["", Any[], Any[]], + jsonify([ + :t=>jsonify(:Link) + :c=>jsonify([ + jsonify([Symbol(""), jsonify(String[]), jsonify(String[])]), text, - [target, ""] - ] + jsonify(["\""*target*"\"", Symbol("")]) + ]) ]) end end -codegen(::JSONLocationTarget, ::URLLocation, ast, node) = textify(ast, node) +# fallback +codegen(::JSONTarget, ::URLLocation, ast, node) = error("You found a bug in URL location JSON code generation.") +codegen(::JSONLocationTarget, ::URLLocation, ast, node) = textify(ast, node, escape_string) +# fallback +codegen(::JSONTarget, ::LineNumberLocation, ast, node) = error("You found a bug in line number location JSON code generation.") function codegen(::JSONLocationTarget, ::LineNumberLocation, ast::NorgDocument, node::Node) # Who are you, people who link to line location ? 
- "#l-$(textify(ast, node))" + "#l-$(textify(ast, node, escape_string))" end -function codegen(t::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) +# fallback +codegen(::JSONTarget, ::DetachedModifierLocation, ast, node) = error("You found a bug in detached modifier location JSON code generation.") +function codegen(::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) kindoftarget = kind(first(children(node))) - title = textify(ast, last(children(node))) + title = textify(ast, last(children(node)), escape_string) if AST.is_heading(kindoftarget) level_num = AST.heading_level(first(children(node))) level = "h" * string(level_num) @@ -214,11 +231,13 @@ function codegen(t::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDoc end end +# fallback +codegen(::JSONTarget, ::MagicLocation, ast, node) = error("You found a bug in magic location JSON code generation.") function codegen(::JSONLocationTarget, ::MagicLocation, ast::NorgDocument, node::Node) - key = textify(ast, node) + key = textify(ast, node, escape_string) if haskey(ast.targets, key) kindoftarget, targetnoderef = ast.targets[key]::Tuple{Kind, Ref{Node}} - title = textify(ast, first(children(targetnoderef[]))) + title = textify(ast, first(children(targetnoderef[]::Node)::Vector{Node}), escape_string) if AST.is_heading(kindoftarget) level_num = AST.heading_level(kindoftarget) level = "h" * string(level_num) @@ -235,6 +254,8 @@ function codegen(::JSONLocationTarget, ::MagicLocation, ast::NorgDocument, node: end end +# fallback +codegen(::JSONTarget, ::FileLocation, ast, node) = error("You found a bug in file location JSON code generation.") function codegen(t::JSONLocationTarget, ::FileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" @@ -242,7 +263,7 @@ function codegen(t::JSONLocationTarget, ::FileLocation, ast::NorgDocument, node: else start = "" end - target_loc = textify(ast, target) + target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" subtarget_loc = "" else @@ -252,6 +273,8 @@ function codegen(t::JSONLocationTarget, ::FileLocation, ast::NorgDocument, node: start * target_loc * subtarget_loc end +# fallback +codegen(::JSONTarget, ::NorgFileLocation, ast, node) = error("You found a bug in norg file location JSON code generation.") function codegen(t::JSONLocationTarget, ::NorgFileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" @@ -259,7 +282,7 @@ function codegen(t::JSONLocationTarget, ::NorgFileLocation, ast::NorgDocument, n else start = "" end - target_loc = textify(ast, target) + target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" subtarget_loc = "" else @@ -269,9 +292,11 @@ function codegen(t::JSONLocationTarget, ::NorgFileLocation, ast::NorgDocument, n start * target_loc * subtarget_loc end +# fallback +codegen(::JSONTarget, ::WikiLocation, ast, node) = error("You found a bug in wiki location JSON code generation.") function codegen(t::JSONLocationTarget, ::WikiLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) - target_loc = textify(ast, target) + target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" subtarget_loc = "" else @@ -280,9 +305,11 @@ function codegen(t::JSONLocationTarget, ::WikiLocation, ast::NorgDocument, node: "/" * target_loc * subtarget_loc end -codegen(::JSONLocationTarget, ::TimestampLocation, 
ast::NorgDocument, node::Node) = textify(ast, node) +# fallback +codegen(::JSONTarget, ::TimestampLocation, ast, node) = error("You found a bug in timestamp location JSON code generation.") +codegen(::JSONLocationTarget, ::TimestampLocation, ast::NorgDocument, node::Node) = textify(ast, node, escape_string) -codegen(t::JSONTarget, ::LinkDescription, ast::NorgDocument, node::Node) = collect(Iterators.flatten(codegen_children(t, ast, node))) +codegen(t::JSONTarget, ::LinkDescription, ast::NorgDocument, node::Node) = jsonify(codegen_children(t, ast, node)) function codegen(t::JSONTarget, ::Anchor, ast::NorgDocument, node::Node) text = codegen(t, ast, first(node.children)) @@ -291,35 +318,35 @@ function codegen(t::JSONTarget, ::Anchor, ast::NorgDocument, node::Node) else target = codegen(JSONLocationTarget(), ast, last(children(node))) end - OrderedDict([ - "t"=>"Link" - "c"=>[ - ["", Any[], Any[]], + jsonify([ + :t=>jsonify(:Link) + :c=>jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), text, - [target, ""] - ] + jsonify(["\""*target*"\"", Symbol("")]) + ]) ]) end function codegen(t::JSONTarget, ::InlineLinkTarget, ast::NorgDocument, node::Node) - text = [] + text = String[] for c in children(node) r = codegen(t, ast, c) if !isempty(r) - append!(text, r) + push!(text, r) push!(text, " ") end end if !isempty(text) pop!(text) # remove last space end - id = idify(join(textify(ast, node))) - OrderedDict([ - "t"=>"Span" - "c"=>[ - [id, Any[], Any[]], + id = idify(join(textify(ast, node, escape_string))) + jsonify([ + :t=>jsonify(:Span) + :c=>jsonify([ + jsonify(["\""*id*"\"", jsonify([]), jsonify(Any[])]), text - ] + ]) ]) end @@ -335,81 +362,79 @@ function codegen(t::JSONTarget, ::Heading, ast::NorgDocument, node::Node) else heading_title, heading_content... 
= codegen_children(t, ast, node) end - title = textify(ast, heading_title_node) - id_title = idify(level * " " * title) - heading = OrderedDict([ - "t"=>"Header" - "c"=>[level_num, [id_title, [], []], heading_title] + title = "\""*textify(ast, heading_title_node, escape_string)*"\"" + id_title = "\""*idify(level * " " * title)*"\"" + heading = jsonify([ + :t=>jsonify(:Header) + :c=>jsonify([level_num, jsonify([id_title, jsonify([]), jsonify([])]), jsonify([heading_title])]) ]) - id_section = idify("section " * id_title) - OrderedDict([ - "t"=>"Div" - "c"=>[[id_section, [], []], [heading, heading_content...]] + id_section = "\""*idify("section " * id_title)*"\"" + jsonify([ + :t=>jsonify(:Div) + :c=>jsonify([ + jsonify([id_section, jsonify([]), jsonify([])]), + jsonify([heading, heading_content...]) ]) + ]) end -codegen(::JSONTarget, ::StrongDelimiter, ast::NorgDocument, node::Node) = OrderedDict() -codegen(::JSONTarget, ::WeakDelimiter, ast::NorgDocument, node::Node) = OrderedDict() -codegen(::JSONTarget, ::HorizontalRule, ast::NorgDocument, node::Node) = OrderedDict(["t"=>"HorizontalRule", "c"=>[]]) +codegen(::JSONTarget, ::StrongDelimiter, ast::NorgDocument, node::Node) = "" +codegen(::JSONTarget, ::WeakDelimiter, ast::NorgDocument, node::Node) = "" +codegen(::JSONTarget, ::HorizontalRule, ast::NorgDocument, node::Node) = "" function codegen_nestable_children(t::JSONTarget, ast::NorgDocument, node::Node) res = [] for c in children(node) r = codegen(t, ast, c) if !isempty(r) - if kind(c) == K"NestableItem" - push!(res, r) - else - push!(res, [r]) - end + push!(res, r) end end res end function codegen(t::JSONTarget, ::UnorderedList, ast::NorgDocument, node::Node) - OrderedDict([ - "t"=>"BulletList" - "c"=>codegen_nestable_children(t, ast, node) + jsonify([ + :t=>jsonify(:BulletList) + :c=>jsonify(codegen_nestable_children(t, ast, node)) ]) end function codegen(t::JSONTarget, ::OrderedList, ast::NorgDocument, node::Node) - OrderedDict([ - "t"=>"OrderedList" - "c"=>[ - [1, OrderedDict(["t"=>"Decimal"]), OrderedDict(["t"=>"Period"])], - codegen_nestable_children(t, ast, node) - ] + jsonify([ + :t=>jsonify(:OrderedList) + :c=>jsonify([ + jsonify(["1", jsonify([:t=>jsonify(:Decimal)]), jsonify([:t=>jsonify(:Period)])]), + jsonify(codegen_nestable_children(t, ast, node)) + ]) ]) end function codegen(t::JSONTarget, ::NestableItem, ast::NorgDocument, node::Node) - res = [] + res = String[] for c in children(node) - if kind(c) ∉ KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" + if kind(c) == K"IndentSegment" + append!(res, codegen(t, ast, c)) + elseif kind(c) ∉ KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" r = codegen(t, ast, c) - - if r isa Vector - append!(res, r) - elseif !isempty(r) + if !isempty(r) push!(res, r) end end end - res + jsonify(res) end function codegen(t::JSONTarget, ::Quote, ast::NorgDocument, node::Node) #
does not have an 'item' notion, so we have to short-circuit # that. - res = [] + res = String[] for c in children(node) append!(res, filter(!isempty, codegen.(Ref(t), Ref(ast), children(c)))|>collect) end - OrderedDict([ - "t"=>"BlockQuote" - "c"=>res + jsonify([ + :t=>jsonify(:BlockQuote) + :c=>jsonify(res) ]) end @@ -417,25 +442,34 @@ function codegen(t::JSONTarget, ::StandardRangedTag, ast::NorgDocument, node::No tag, others... = children(node) tag_litteral = litteral(ast, tag) if tag_litteral == "comment" - OrderedDict() + "" elseif tag_litteral == "example" - OrderedDict([ - "t"=>"CodeBlock" - "c"=>[["", ["norg"], []], textify(ast, last(others))] + jsonify([ + :t=>jsonify(:CodeBlock) + :c=>jsonify([ + jsonify([Symbol(""), jsonify(["\"norg\""]), jsonify([])]), + "\""*textify(ast, last(others), escape_string)*"\"" + ]) ]) elseif tag_litteral == "details" # TODO - OrderedDict() + "" elseif tag_litteral == "group" - OrderedDict([ - "t"=>"Div", - "c"=>[["", [], []], codegen_children(t, ast, last(others))] + jsonify([ + :t=>jsonify(:Div), + :c=>jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + jsonify(codegen_children(t, ast, last(others))) + ]) ]) else @warn "Unknown standard ranged tag." tag_litteral ast.tokens[AST.start(node)] ast.tokens[AST.stop(node)] - OrderedDict([ - "t"=>"Div", - "c"=>[["", [], []], codegen_children(t, ast, last(others))] + jsonify([ + :t=>jsonify(:Div) + :c=>jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + jsonify(codegen_children(t, ast, last(others))) + ]) ]) end end @@ -444,12 +478,15 @@ function codegen(::JSONTarget, ::Verbatim, ast::NorgDocument, node::Node) # cowardly ignore any verbatim that is not code tag, others... = children(node) if litteral(ast, tag) != "code" - return OrderedDict() + return "" end if length(others) == 1 - OrderedDict([ - "t"=>"CodeBlock" - "c"=>[["", [], []], textify(ast, last(others))] + jsonify([ + :t=>jsonify(:CodeBlock) + :c=>jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + "\""*textify(ast, last(others), escape_string)*"\"" + ]) ]) else language = if kind(first(others)) == K"TagParameter" @@ -457,9 +494,12 @@ function codegen(::JSONTarget, ::Verbatim, ast::NorgDocument, node::Node) else litteral(ast, others[2]) end - OrderedDict([ - "t"=>"CodeBlock" - "c"=>[["", [language], []], textify(ast, last(others))] + jsonify([ + :t=>jsonify(:CodeBlock) + :c=>jsonify([ + jsonify([Symbol(""), jsonify(["\""*language*"\""]), jsonify([])]), + "\""*textify(ast, last(others), escape_string)*"\"" + ]) ]) end end @@ -472,66 +512,66 @@ function codegen(::JSONTarget, ::TodoExtension, ast::NorgDocument, node::Node) else s = "☐" end - OrderedDict([ - "t"=>"Plain" - "c"=>[OrderedDict([ - "t"=>"Str" - "c"=>s - ])] + jsonify([ + :t=>jsonify(:Plain) + :c=>jsonify([jsonify([ + :t=>jsonify(:Str) + :c=>s + ])]) ]) end function codegen(t::JSONTarget, c::Union{WeakCarryoverTag, StrongCarryoverTag}, ast::NorgDocument, node::Node) content = codegen(t, ast, last(children(node))) - label = textify(ast, first(children(node))) + label = "\""*textify(ast, first(children(node)), escape_string)*"\"" # TODO: there's most likely some room for improvement here, as some contents # already have a mechanism for attributes, so the Div is not needed. 
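For reference, the attribute triple being rebuilt by hand in these hunks follows Pandoc's JSON shape `[identifier, [classes], [[key, value], ...]]`, which is exactly what the old `OrderedDict`-based code emitted. A minimal sketch of the equivalence the string-building code is aiming for; the identifier, class and key/value names below are invented for illustration and are not part of the patch:

```julia
# Sketch only, not part of the patch: the hand-built JSON text must parse back
# to the same attr triple the previous OrderedDict-based code produced.
using JSON

# Old style: build a Julia value and serialise it later.
attr_value = ["speaker", ["character"], [["role", "villain"]]]

# New style in this commit: build the JSON text directly.
attr_string = """["speaker", ["character"], [["role", "villain"]]]"""

@assert JSON.parse(attr_string) == attr_value
```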
- attr = ["", [], []] + attr = [Symbol(""), jsonify([]), jsonify([])] if length(children(node)) <= 2 - attr[2] = [label] + attr[2] = jsonify([label]) elseif length(children(node)) == 3 - attr[3] = [[label, textify(ast, children(node)[2])]] + attr[3] = jsonify([jsonify([label, "\""*textify(ast, children(node)[2], escape_string)*"\""])]) else - attr[2] = [join(textify.(Ref(ast), children(node)[1:end-1]), "-")] + attr[2] = jsonify(["\""*join(textify.(Ref(ast), children(node)[1:end-1], escape_string), "-")*"\""]) end t = if kind(node) == K"WeakCarryoverTag" && kind(last(children(node)))==K"ParagraphSegment" - "Span" + :Span else - "Div" + :Div end - if !(content isa Vector) - content = [content] + if !(first(content) == '[') + content = jsonify([content]) end - OrderedDict([ - "t"=>t, - "c"=>[attr, content] + jsonify([ + :t=>jsonify(t), + :c=>jsonify([jsonify(attr), content]) ]) end function codegen(t::JSONTarget, ::Definition, ast::NorgDocument, node::Node) items = children(node) - OrderedDict([ - "t"=>"DefinitionList" - "c"=>map(items) do item + jsonify([ + :t=>jsonify(:DefinitionList) + :c=>map(items) do item term, def... = children(item) - term_id = "def_" * idify(textify(ast, term)) - term_node = OrderedDict([ - "t"=>"Span" - "c"=>[ - (term_id, [], []), - codegen(t, ast, term) - ] + term_id = "def_" * idify(textify(ast, term, escape_string)) + term_node = jsonify([ + :t=>jsonify(:Span) + :c=>jsonify([ + jsonify(["\""*term_id*"\"", jsonify([]), jsonify([])]), + jsonify([codegen(t, ast, term)]) + ]) ]) - def_node = codegen.(Ref(t), Ref(ast), def) - ([term_node], [def_node]) - end + def_node = jsonify(codegen.(Ref(t), Ref(ast), def)) + jsonify([jsonify([term_node]), jsonify([def_node])]) + end |> jsonify ]) end -function codegen(t::JSONTarget, ::Footnote, ast::NorgDocument, node::Node) +function codegen(::JSONTarget, ::Footnote, ast::NorgDocument, node::Node) # Return nothing, pandoc expects footnotes to be defined where they are called. 
- [] + "" end function codegen(t::JSONTarget, ::Slide, ast::NorgDocument, node::Node) diff --git a/src/parser/link.jl b/src/parser/link.jl index 61c5769..9038439 100644 --- a/src/parser/link.jl +++ b/src/parser/link.jl @@ -173,7 +173,7 @@ function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::V else c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] children = p.children - ps = AST.Node(K"ParagraphSegment", [c...;children[1].children...], start, children[1].stop) + ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) children[1] = ps AST.Node(K"None", children, start, i) end @@ -211,7 +211,7 @@ function parse_norg(::MagicLocation, parents::Vector{Kind}, tokens::Vector{Token else c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] children = p.children - ps = AST.Node(K"ParagraphSegment", [c...;children[1].children...], start, children[1].stop) + ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) children[1] = ps AST.Node(K"None", children, start, i) end @@ -314,7 +314,7 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} else c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] children = p.children - ps = AST.Node(K"ParagraphSegment", [c...;children[1].children...], start, children[1].stop) + ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) children[1] = ps return AST.Node(K"None", children, start, i) end diff --git a/src/utils.jl b/src/utils.jl index a3bf439..d8c6b6e 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -38,15 +38,15 @@ function idify(text) end """ - textify(ast, node) + textify(ast, node, escape=identity) -Return the raw text associated with a node. +Return the raw text associated with a node. You can specify an escape function. 
""" -function textify(ast::NorgDocument, node::Node) +function textify(ast::NorgDocument, node::Node, escape=identity) if is_leaf(node) - AST.litteral(ast, node) + escape(AST.litteral(ast, node)) else - join(textify(ast, c) for c in children(node)) + join(textify(ast, c, escape) for c in children(node)) end end diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index ffecb4f..1f1dc37 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -1,4 +1,4 @@ -using JET, AbstractTrees, OrderedCollections +using JET, AbstractTrees @testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin payload = open(Norg.NORG_SPEC_PATH, "r") do f @@ -20,7 +20,7 @@ ast = norg(payload) @test_opt ignored_modules=(AbstractTrees, Base) norg(payload) # Codegen @test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) -@test_opt ignored_modules=(AbstractTrees, OrderedCollections, Base) Norg.codegen(JSONTarget(), ast) +@test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) end diff --git a/test/codegen_tests/json.jl b/test/codegen_tests/json.jl index f174c56..f27a760 100644 --- a/test/codegen_tests/json.jl +++ b/test/codegen_tests/json.jl @@ -1,6 +1,5 @@ @testset "JSON target" begin -using OrderedCollections -import JSON +using JSON # generated JSON correctness is checked directly with pandoc using pandoc_jll @@ -9,7 +8,7 @@ function pandoc_approval(json) try pandoc() do pandoc_bin io = PipeBuffer() - JSON.print(io, json) + write(io, json) run(pipeline(`$(pandoc_bin) -f json -t json`, stdin=io, stdout=devnull, stderr=io_err)) end catch e @@ -22,8 +21,9 @@ end @testset "Test paragraphs" begin s = "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !" 
- json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) pars = json["blocks"] @test pars[1]["t"] == "Para" @test pars[2]["t"] == "Para" @@ -53,16 +53,18 @@ simple_markups_class = [ @testset "Test correct markup for $m" for (m, node) in simple_markups_nodes s = "$(m)inner$(m)" - json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) b = json["blocks"][1]["c"][1] @test b["t"] == node end @testset "Test correct class for $m" for (m, class) in simple_markups_class s = "$(m)inner$(m)" - json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) b = json["blocks"][1]["c"][1] if !isnothing(class) @test first(b["c"])[2][1] == class @@ -88,8 +90,9 @@ simple_link_tests = [ @testset "Test links: $link" for (link, target, text) in simple_link_tests s = "{$link}" - json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) link = json["blocks"][1]["c"][1] @test link["t"] == "Link" @test link["c"][2][1]["t"] == "Str" @@ -99,8 +102,9 @@ end @testset "Test links with description: $link" for (link, target) in simple_link_tests s = "{$link}[website]" - json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) link = json["blocks"][1]["c"][1] @test link["t"] == "Link" @test link["c"][2][1]["t"] == "Str" @@ -110,8 +114,9 @@ end @testset "Anchors with embedded definition: $link" for (link, target) in simple_link_tests s = "[website]{$link}" - json = Norg.codegen(Norg.JSONTarget(), Norg.parse_norg(Norg.tokenize(s))) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) link = json["blocks"][1]["c"][1] @test link["t"] == "Link" @test link["c"][2][1]["t"] == "Str" @@ -126,8 +131,9 @@ end json = norg(Norg.JSONTarget(), s) @end """ - json = norg(JSONTarget(), s) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) cb = json["blocks"][1] @test cb["t"] == "CodeBlock" attr, content = cb["c"] @@ -141,8 +147,9 @@ heading_levels = 1:6 s = """$(repeat("*", i)) heading text """ - json = norg(JSONTarget(), s) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) container = json["blocks"][1] @test container["t"] == "Div" attr, content = container["c"] @@ -161,16 +168,18 @@ nestable_lists = ['~'=>"OrderedList", '-'=>"BulletList", ">"=>"BlockQuote"] $m Shintero yuo been na $m Na sinchere fedicheda """ - json = norg(JSONTarget(), s) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) list = json["blocks"][1] @test list["t"] == target end @testset "inline link" begin s = """""" - json = norg(JSONTarget(), s) - @test pandoc_approval(json) + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = 
JSON.parse(json_str) p = json["blocks"][1] @test length(p["c"]) == 1 span = first(p["c"]) @@ -186,7 +195,6 @@ end end json = norg(JSONTarget(), s) @test pandoc_approval(json) - @test json isa OrderedDict end end From 0414a0238a978b4d59ce957497ac1f4682ac621b Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Sun, 17 Sep 2023 21:18:50 +0200 Subject: [PATCH 4/9] feat: SnoopCompile-d the package, removed some invalidations. --- Project.toml | 7 ++++++- ext/TimeZonesExt.jl | 18 ++++++++++++++++++ src/Norg.jl | 2 ++ src/kind.jl | 8 +++----- src/semantics/timestamps.jl | 22 +++++++++++----------- src/tokens.jl | 2 +- 6 files changed, 41 insertions(+), 18 deletions(-) create mode 100644 ext/TimeZonesExt.jl diff --git a/Project.toml b/Project.toml index 0ec622c..1f74891 100644 --- a/Project.toml +++ b/Project.toml @@ -5,13 +5,18 @@ version = "0.3.0" [deps] AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" -Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" HypertextLiteral = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2" OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[weakdeps] TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" +[extensions] +TimeZonesExt = "TimeZones" + [compat] AbstractTrees = "0.4" OrderedCollections = "1" diff --git a/ext/TimeZonesExt.jl b/ext/TimeZonesExt.jl new file mode 100644 index 0000000..cae94d8 --- /dev/null +++ b/ext/TimeZonesExt.jl @@ -0,0 +1,18 @@ +module TimeZonesExt +using Dates, TimeZones, Norg + +Norg.HAS_TIMEZONES_CAPABILITIES = true + +function parse_timezone(::Val{:extension}, w) + timezone = nothing + try + timezone = TimeZone(w) + catch e + if e isa ArgumentError + @warn "Unable to process timezone" w tokens[i] + else + rethrow(e) + end + end + timezone +end diff --git a/src/Norg.jl b/src/Norg.jl index 4d75485..6f0325c 100644 --- a/src/Norg.jl +++ b/src/Norg.jl @@ -36,6 +36,8 @@ Path to the Norg semantics specification. 
""" const NORG_SEMANTICS_PATH = joinpath(NORG_SPEC_ROOT, "1.0-semantics.norg") +HAS_TIMEZONES_CAPABILITIES = false + using AbstractTrees include("kind.jl") diff --git a/src/kind.jl b/src/kind.jl index 368f9f2..4561e43 100644 --- a/src/kind.jl +++ b/src/kind.jl @@ -223,8 +223,6 @@ let kind_int_type = :UInt8, return Base.bitcast(Kind, convert($kind_int_type, x)) end - Base.convert(::Type{String}, k::Kind) = _kind_names[1 + Base.bitcast($kind_int_type, k)] - let kindstr_to_int = Dict(s=>i-1 for (i,s) in enumerate(_kind_names)) function Base.convert(::Type{Kind}, s::AbstractString) i = get(kindstr_to_int, s) do @@ -234,8 +232,8 @@ let kind_int_type = :UInt8, end end - Base.string(x::Kind) = convert(String, x) - Base.print(io::IO, x::Kind) = print(io, convert(String, x)) + Base.string(k::Kind) = _kind_names[1 + Base.bitcast($kind_int_type, k)] + Base.print(io::IO, x::Kind) = print(io, string(x)) Base.typemin(::Type{Kind}) = Kind(0) Base.typemax(::Type{Kind}) = Kind($max_kind_int) @@ -248,7 +246,7 @@ let kind_int_type = :UInt8, end function Base.show(io::IO, k::Kind) - print(io, "K\"$(convert(String, k))\"") + print(io, "K\"$(string(k))\"") end """ diff --git a/src/semantics/timestamps.jl b/src/semantics/timestamps.jl index 7dcb6ff..de0d6fc 100644 --- a/src/semantics/timestamps.jl +++ b/src/semantics/timestamps.jl @@ -1,5 +1,4 @@ -using Dates, TimeZones - +using Dates """ parse_norg_timestamp(tokens, start, stop) @@ -177,15 +176,7 @@ function parse_one_norg_timestamp(tokens, start, stop) stop_timestamp = Parser.consume_until(KSet"Whitespace -", tokens, i)-2 if stop_timestamp <= stop w = join(value.(tokens[i:stop_timestamp])) - try - timezone = TimeZone(w) - catch e - if e isa ArgumentError - @warn "Unable to process timezone" w tokens[i] - else - rethrow(e) - end - end + timezone = parse_timezone(w) i = stop_timestamp + 1 end end @@ -241,3 +232,12 @@ function parse_year(tokens, start, _) w = value(token) tryparse(Int64, w) end + +function parse_timezone(w) + if HAS_TIMEZONE_CAPABILITIES + parse_timezone(Val(:extension), w) + else + nothing + end +end + diff --git a/src/tokens.jl b/src/tokens.jl index 8b97f18..357e319 100644 --- a/src/tokens.jl +++ b/src/tokens.jl @@ -49,7 +49,7 @@ function Token(kind, line, char, value) end function Base.show(io::IO, token::Token) print(io, - "Token(K\"$(convert(String, kind(token)))\", \"$(value(token))\", line $(string(line(token))), col. $(string(char(token))))") + "Token(K\"$(string(kind(token)))\", \"$(value(token))\", line $(string(line(token))), col. $(string(char(token))))") end SOFToken() = Token(K"StartOfFile", 0, 0, SubString("")) EOFToken() = Token(K"EndOfFile", 0, 0, SubString("")) From a55cf4ecd18f1f720019f1a6cc85efcd202176a6 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Sun, 17 Sep 2023 21:36:31 +0200 Subject: [PATCH 5/9] feat: Aqua.jl pass. 
--- Project.toml | 14 +++++++++----- test/Project.toml | 2 +- test/code_analysis_tests/test_jet.jl | 2 +- test/runtests.jl | 2 ++ 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/Project.toml b/Project.toml index 1f74891..bf2e509 100644 --- a/Project.toml +++ b/Project.toml @@ -5,11 +5,10 @@ version = "0.3.0" [deps] AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" HypertextLiteral = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2" -OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" -Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" [weakdeps] TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" @@ -19,14 +18,19 @@ TimeZonesExt = "TimeZones" [compat] AbstractTrees = "0.4" -OrderedCollections = "1" -SnoopPrecompile = "1" HypertextLiteral = "0.9" +SnoopPrecompile = "1" TimeZones = "1" julia = "1" [extras] +AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" +Gumbo = "708ec375-b3d6-5a57-a7ce-8257bf98657a" +JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b" +JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +pandoc_jll = "c5432543-76ad-5c9d-82bf-db097047a5e2" [targets] -test = ["Test"] +test = ["AbstractTrees", "Aqua", "Gumbo", "JET", "JSON", "Test", "pandoc_jll"] diff --git a/test/Project.toml b/test/Project.toml index 6744bdf..4ffb13d 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -1,8 +1,8 @@ [deps] AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" Gumbo = "708ec375-b3d6-5a57-a7ce-8257bf98657a" JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b" JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" pandoc_jll = "c5432543-76ad-5c9d-82bf-db097047a5e2" diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index 1f1dc37..ba35744 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -20,7 +20,7 @@ ast = norg(payload) @test_opt ignored_modules=(AbstractTrees, Base) norg(payload) # Codegen @test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) -@test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) +@test_opt broken=true ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) end diff --git a/test/runtests.jl b/test/runtests.jl index ab00b2a..6d91d42 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,6 +1,7 @@ using Norg using Test using AbstractTrees +using Aqua import Norg: @K_str, kind, value @@ -25,5 +26,6 @@ import Norg: @K_str, kind, value end @testset "code analysis" begin include("code_analysis_tests/test_jet.jl") + Aqua.test_all(Norg) end end From e8d60ba8a379df6a0749c7bc70475528feb65964 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Mon, 18 Sep 2023 23:09:59 +0200 Subject: [PATCH 6/9] chore: Make CI happy. 
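Most of the churn in this commit comes from re-running the formatter after switching `.JuliaFormatter.toml` from the SciML style to the Blue style. A sketch of that step, run from the repository root; JuliaFormatter.jl itself is not part of the patch:

```julia
# Sketch only: reformatting the repository with the committed configuration.
using JuliaFormatter

# format(".") walks the tree and applies the nearest .JuliaFormatter.toml,
# which after this commit selects `style = "blue"`.
format(".")
```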
--- .JuliaFormatter.toml | 4 +- Project.toml | 1 + docs/make.jl | 95 +- ext/TimeZonesExt.jl | 9 +- src/Norg.jl | 20 +- src/assets/norg_line_ending.jl | 4 +- src/assets/norg_punctuation.jl | 998 +++++++++++++++--- src/assets/norg_whitespace.jl | 46 +- src/ast.jl | 38 +- src/codegen.jl | 13 +- src/codegen/html.jl | 146 +-- src/codegen/json.jl | 537 ++++++---- src/kind.jl | 319 +++--- src/match/attached_modifiers.jl | 55 +- src/match/detached_modifiers.jl | 19 +- src/match/links.jl | 6 +- src/match/match.jl | 71 +- src/match/rangeable_detached_modifier.jl | 16 +- src/match/tags.jl | 12 +- src/parser/attachedmodifier.jl | 21 +- src/parser/detachedmodifierextensions.jl | 19 +- src/parser/detachedmodifiersuffix.jl | 8 +- src/parser/link.jl | 107 +- src/parser/nestablemodifier.jl | 18 +- src/parser/parser.jl | 66 +- src/parser/rangeabledetachedmodifier.jl | 24 +- src/parser/structuralmodifier.jl | 16 +- src/parser/tag.jl | 23 +- src/parser/verbatim.jl | 1 + src/scanners.jl | 23 +- src/semantics/timestamps.jl | 36 +- src/strategy.jl | 31 +- src/tokenize.jl | 4 +- src/tokens.jl | 8 +- src/utils.jl | 36 +- .../test_detached_modifier_extension.jl | 13 +- .../test_detached_modifier_suffix.jl | 95 +- test/ast_tests/test_headings.jl | 4 +- test/ast_tests/test_links.jl | 396 +++---- test/ast_tests/test_markup.jl | 78 +- .../test_nestable_detached_modifiers.jl | 19 +- test/ast_tests/test_paragraphs.jl | 8 +- .../test_rangeable_detached_modifiers.jl | 49 +- test/ast_tests/test_tags.jl | 224 ++-- test/code_analysis_tests/test_aqua.jl | 7 + test/code_analysis_tests/test_jet.jl | 41 +- test/codegen_tests/html.jl | 311 +++--- test/codegen_tests/json.jl | 376 +++---- test/runtests.jl | 14 +- test/test_scanners.jl | 38 +- 50 files changed, 2798 insertions(+), 1725 deletions(-) create mode 100644 test/code_analysis_tests/test_aqua.jl diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml index 6bd59fd..323237b 100644 --- a/.JuliaFormatter.toml +++ b/.JuliaFormatter.toml @@ -1,3 +1 @@ -style = "sciml" -format_docstrings = true -margin = 80 +style = "blue" diff --git a/Project.toml b/Project.toml index bf2e509..f60090e 100644 --- a/Project.toml +++ b/Project.toml @@ -9,6 +9,7 @@ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" HypertextLiteral = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" +TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" [weakdeps] TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" diff --git a/docs/make.jl b/docs/make.jl index 9a914f1..290a7a4 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -10,18 +10,19 @@ end; md_path = joinpath(@__DIR__, "src", "1.0-specification.md") ast = norg(s) function mk_toc(ast) - toc_tree = filter(!isnothing, [mk_toc(ast, c) for c in children(ast.root)]) + return toc_tree = filter(!isnothing, [mk_toc(ast, c) for c in children(ast.root)]) end function mk_toc(ast, node) c = children(node) if !Norg.AST.is_heading(node) nothing - else - h, node_children... = c + else + h, node_children... 
= c level = Norg.AST.heading_level(node) - (title=Norg.Codegen.textify(ast, h), - level = level, - children=filter([mk_toc(ast, c) for c in node_children]) do c + ( + title=Norg.Codegen.textify(ast, h), + level=level, + children=filter([mk_toc(ast, c) for c in node_children]) do c if isnothing(c) false elseif c.level >= 3 @@ -31,16 +32,14 @@ function mk_toc(ast, node) else true end - end + end, ) end end toc = mk_toc(ast) function mk_html_toc(toc_elem) - href = "#"*"h$(toc_elem.level)-"*Norg.Codegen.idify(toc_elem.title) - lis = [ - @htl("
  • $(mk_html_toc(t))
  • ") for t in toc_elem.children - ] + href = "#" * "h$(toc_elem.level)-" * Norg.Codegen.idify(toc_elem.title) + lis = [@htl("
  • $(mk_html_toc(t))
  • ") for t in toc_elem.children] @htl """$(toc_elem.title)
      @@ -53,10 +52,13 @@ lis = [@htl("
    • $(mk_html_toc(c))
    • ") for c in toc] toc_html = @htl """
        $lis
      """ open(md_path, "w") do f - write(f, """This is an automated rendering of the [norg specification](https://github.com/nvim-neorg/norg-specs) using Norg.jl. + write( + f, + """This is an automated rendering of the [norg specification](https://github.com/nvim-neorg/norg-specs) using Norg.jl. - # Table of contents - """) +# Table of contents +""", + ) write(f, "```@raw html\n") write(f, string(toc_html)) write(f, "\n") @@ -64,41 +66,40 @@ open(md_path, "w") do f write(f, "\n```") end -DocMeta.setdocmeta!(Norg, :DocTestSetup, :(using Norg); recursive = true) +DocMeta.setdocmeta!(Norg, :DocTestSetup, :(using Norg); recursive=true) makedocs(; - modules = [Norg], - authors = "Hugo Levy-Falk and contributors", - repo = "https://github.com/Klafyvel/Norg.jl/blob/{commit}{path}#{line}", - sitename = "Norg.jl", - format = Documenter.HTML(; - prettyurls = true, - canonical = "https://klafyvel.github.io/Norg.jl", - edit_link = "main", - assets = String[]), - pages = [ - "Home" => "index.md", - "Specification" => "1.0-specification.md", - "Internals" => [ - "How parsing works" => "internals.md", - "Private API" => [ - "internals/kinds.md" - "internals/tokens.md" - "internals/scanners.md" - "internals/match.md" - "internals/parser.md" - "Code generation" => [ - "internals/codegen/index.md" - "Targets" => [ - "internals/codegen/html.md" - "internals/codegen/json.md" - ] + modules=[Norg], + authors="Hugo Levy-Falk and contributors", + repo="https://github.com/Klafyvel/Norg.jl/blob/{commit}{path}#{line}", + sitename="Norg.jl", + format=Documenter.HTML(; + prettyurls=true, + canonical="https://klafyvel.github.io/Norg.jl", + edit_link="main", + assets=String[], + ), + pages=[ + "Home" => "index.md", + "Specification" => "1.0-specification.md", + "Internals" => [ + "How parsing works" => "internals.md", + "Private API" => [ + "internals/kinds.md" + "internals/tokens.md" + "internals/scanners.md" + "internals/match.md" + "internals/parser.md" + "Code generation" => [ + "internals/codegen/index.md" + "Targets" => [ + "internals/codegen/html.md" + "internals/codegen/json.md" ] ] - ] - ]) + ], + ], + ], +) - -deploydocs(; - repo = "github.com/Klafyvel/Norg.jl", - devbranch = "main") +deploydocs(; repo="github.com/Klafyvel/Norg.jl", devbranch="main") diff --git a/ext/TimeZonesExt.jl b/ext/TimeZonesExt.jl index cae94d8..99ae288 100644 --- a/ext/TimeZonesExt.jl +++ b/ext/TimeZonesExt.jl @@ -1,7 +1,9 @@ module TimeZonesExt using Dates, TimeZones, Norg -Norg.HAS_TIMEZONES_CAPABILITIES = true +@static if VERSION ≥ v"1.9" + Norg.HAS_TIMEZONES_CAPABILITIES = true +end function parse_timezone(::Val{:extension}, w) timezone = nothing @@ -9,10 +11,11 @@ function parse_timezone(::Val{:extension}, w) timezone = TimeZone(w) catch e if e isa ArgumentError - @warn "Unable to process timezone" w tokens[i] + @warn "Unable to process timezone" w else rethrow(e) end end - timezone + return timezone +end end diff --git a/src/Norg.jl b/src/Norg.jl index 6f0325c..7d3adb2 100644 --- a/src/Norg.jl +++ b/src/Norg.jl @@ -36,7 +36,11 @@ Path to the Norg semantics specification. """ const NORG_SEMANTICS_PATH = joinpath(NORG_SPEC_ROOT, "1.0-semantics.norg") -HAS_TIMEZONES_CAPABILITIES = false +@static if VERSION < v"1.9" + HAS_TIMEZONES_CAPABILITIES = true +else + HAS_TIMEZONES_CAPABILITIES = false +end using AbstractTrees @@ -64,7 +68,6 @@ include("semantics/timestamps.jl") include("codegen.jl") using .Codegen - """ norg([codegentarget, ] s) @@ -85,8 +88,8 @@ julia> norg(HTMLTarget(), "* Hello world!")

      Hello world!

        ``` """ -norg(s)= parse_norg(tokenize(s)) -norg(t::T, s) where {T <: Codegen.CodegenTarget} = codegen(t, norg(s)) +norg(s) = parse_norg(tokenize(s)) +norg(t::T, s) where {T<:Codegen.CodegenTarget} = codegen(t, norg(s)) """ Easily parse Norg string to an AST. This can be used in *e.g.* Pluto notebooks, @@ -107,12 +110,12 @@ NorgDocument └─ Example ``` """ -macro norg_str(s, t ...) - norg(s) +macro norg_str(s, t...) + return norg(s) end function Base.show(io::IO, ::MIME"text/html", ast::AST.NorgDocument) - print(io, codegen(HTMLTarget(), ast)) + return print(io, codegen(HTMLTarget(), ast)) end using SnoopPrecompile @@ -204,4 +207,7 @@ end export HTMLTarget, JSONTarget export @norg_str, norg +if !isdefined(Base, :get_extension) + include("../ext/TimeZonesExt.jl") +end end diff --git a/src/assets/norg_line_ending.jl b/src/assets/norg_line_ending.jl index 7074419..7935695 100644 --- a/src/assets/norg_line_ending.jl +++ b/src/assets/norg_line_ending.jl @@ -2,7 +2,5 @@ All the UTF-8 characters that Norg specifies as a whitespace. """ const NORG_LINE_ENDING = String[ - string(Char(0x000A)), - string(Char(0x000D)), - String([Char(0x000D), Char(0x000A)]), + string(Char(0x000A)), string(Char(0x000D)), String([Char(0x000D), Char(0x000A)]) ] diff --git a/src/assets/norg_punctuation.jl b/src/assets/norg_punctuation.jl index 7a2ebe1..218de7b 100644 --- a/src/assets/norg_punctuation.jl +++ b/src/assets/norg_punctuation.jl @@ -2,169 +2,835 @@ All the UTF-8 characters that are punctuation in Norg specification. See [the norg specification](https://github.com/nvim-neorg/norg-specs/blob/main/1.0-specification.norg) """ -const NORG_PUNCTUATION = Set(Char[0x0024, 0x002B, 0x003C, 0x003D, 0x003E, - 0x005E, 0x0060, 0x007C, 0x007E, 0x005F, - 0x203F, 0x2040, 0x2054, 0xFE33, 0xFE34, - 0xFE4D, 0xFE4E, 0xFE4F, 0xFF3F, 0x002D, - 0x058A, 0x05BE, 0x1400, 0x1806, 0x2010, - 0x2011, 0x2012, 0x2013, 0x2014, 0x2015, - 0x2E17, 0x2E1A, 0x2E3A, 0x2E3B, 0x2E40, - 0x2E5D, 0x301C, 0x3030, 0x30A0, 0xFE31, - 0xFE32, 0xFE58, 0xFE63, 0xFF0D, 0x10EAD, - 0x0029, 0x005D, 0x007D, 0x0F3B, 0x0F3D, - 0x169C, 0x2046, 0x207E, 0x208E, 0x2309, - 0x230B, 0x232A, 0x2769, 0x276B, 0x276D, - 0x276F, 0x2771, 0x2773, 0x2775, 0x27C6, - 0x27E7, 0x27E9, 0x27EB, 0x27ED, 0x27EF, - 0x2984, 0x2986, 0x2988, 0x298A, 0x298C, - 0x298E, 0x2990, 0x2992, 0x2994, 0x2996, - 0x2998, 0x29D9, 0x29DB, 0x29FD, 0x2E23, - 0x2E25, 0x2E27, 0x2E29, 0x2E56, 0x2E58, - 0x2E5A, 0x2E5C, 0x3009, 0x300B, 0x300D, - 0x300F, 0x3011, 0x3015, 0x3017, 0x3019, - 0x301B, 0x301E, 0x301F, 0xFD3E, 0xFE18, - 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, - 0xFE40, 0xFE42, 0xFE44, 0xFE48, 0xFE5A, - 0xFE5C, 0xFE5E, 0xFF09, 0xFF3D, 0xFF5D, - 0xFF60, 0xFF63, 0x00BB, 0x2019, 0x201D, - 0x203A, 0x2E03, 0x2E05, 0x2E0A, 0x2E0D, - 0x2E1D, 0x2E21, 0x00AB, 0x2018, 0x201B, - 0x201C, 0x201F, 0x2039, 0x2E02, 0x2E04, - 0x2E09, 0x2E0C, 0x2E1C, 0x2E20, 0x0021, - 0x0022, 0x0023, 0x0025, 0x0026, 0x0027, - 0x002A, 0x002C, 0x002E, 0x002F, 0x003A, - 0x003B, 0x003F, 0x0040, 0x005C, 0x00A1, - 0x00A7, 0x00B6, 0x00B7, 0x00BF, 0x037E, - 0x0387, 0x055A, 0x055B, 0x055C, 0x055D, - 0x055E, 0x055F, 0x0589, 0x05C0, 0x05C3, - 0x05C6, 0x05F3, 0x05F4, 0x0609, 0x060A, - 0x060C, 0x060D, 0x061B, 0x061D, 0x061E, - 0x061F, 0x066A, 0x066B, 0x066C, 0x066D, - 0x06D4, 0x0700, 0x0701, 0x0702, 0x0703, - 0x0704, 0x0705, 0x0706, 0x0707, 0x0708, - 0x0709, 0x070A, 0x070B, 0x070C, 0x070D, - 0x07F7, 0x07F8, 0x07F9, 0x0830, 0x0831, - 0x0832, 0x0833, 0x0834, 0x0835, 0x0836, - 0x0837, 0x0838, 0x0839, 0x083A, 0x083B, - 0x083C, 0x083D, 0x083E, 0x085E, 
0x0964, - 0x0965, 0x0970, 0x09FD, 0x0A76, 0x0AF0, - 0x0C77, 0x0C84, 0x0DF4, 0x0E4F, 0x0E5A, - 0x0E5B, 0x0F04, 0x0F05, 0x0F06, 0x0F07, - 0x0F08, 0x0F09, 0x0F0A, 0x0F0B, 0x0F0C, - 0x0F0D, 0x0F0E, 0x0F0F, 0x0F10, 0x0F11, - 0x0F12, 0x0F14, 0x0F85, 0x0FD0, 0x0FD1, - 0x0FD2, 0x0FD3, 0x0FD4, 0x0FD9, 0x0FDA, - 0x104A, 0x104B, 0x104C, 0x104D, 0x104E, - 0x104F, 0x10FB, 0x1360, 0x1361, 0x1362, - 0x1363, 0x1364, 0x1365, 0x1366, 0x1367, - 0x1368, 0x166E, 0x16EB, 0x16EC, 0x16ED, - 0x1735, 0x1736, 0x17D4, 0x17D5, 0x17D6, - 0x17D8, 0x17D9, 0x17DA, 0x1800, 0x1801, - 0x1802, 0x1803, 0x1804, 0x1805, 0x1807, - 0x1808, 0x1809, 0x180A, 0x1944, 0x1945, - 0x1A1E, 0x1A1F, 0x1AA0, 0x1AA1, 0x1AA2, - 0x1AA3, 0x1AA4, 0x1AA5, 0x1AA6, 0x1AA8, - 0x1AA9, 0x1AAA, 0x1AAB, 0x1AAC, 0x1AAD, - 0x1B5A, 0x1B5B, 0x1B5C, 0x1B5D, 0x1B5E, - 0x1B5F, 0x1B60, 0x1B7D, 0x1B7E, 0x1BFC, - 0x1BFD, 0x1BFE, 0x1BFF, 0x1C3B, 0x1C3C, - 0x1C3D, 0x1C3E, 0x1C3F, 0x1C7E, 0x1C7F, - 0x1CC0, 0x1CC1, 0x1CC2, 0x1CC3, 0x1CC4, - 0x1CC5, 0x1CC6, 0x1CC7, 0x1CD3, 0x2016, - 0x2017, 0x2020, 0x2021, 0x2022, 0x2023, - 0x2024, 0x2025, 0x2026, 0x2027, 0x2030, - 0x2031, 0x2032, 0x2033, 0x2034, 0x2035, - 0x2036, 0x2037, 0x2038, 0x203B, 0x203C, - 0x203D, 0x203E, 0x2041, 0x2042, 0x2043, - 0x2047, 0x2048, 0x2049, 0x204A, 0x204B, - 0x204C, 0x204D, 0x204E, 0x204F, 0x2050, - 0x2051, 0x2053, 0x2055, 0x2056, 0x2057, - 0x2058, 0x2059, 0x205A, 0x205B, 0x205C, - 0x205D, 0x205E, 0x2CF9, 0x2CFA, 0x2CFB, - 0x2CFC, 0x2CFE, 0x2CFF, 0x2D70, 0x2E00, - 0x2E01, 0x2E06, 0x2E07, 0x2E08, 0x2E0B, - 0x2E0E, 0x2E0F, 0x2E10, 0x2E11, 0x2E12, - 0x2E13, 0x2E14, 0x2E15, 0x2E16, 0x2E18, - 0x2E19, 0x2E1B, 0x2E1E, 0x2E1F, 0x2E2A, - 0x2E2B, 0x2E2C, 0x2E2D, 0x2E2E, 0x2E30, - 0x2E31, 0x2E32, 0x2E33, 0x2E34, 0x2E35, - 0x2E36, 0x2E37, 0x2E38, 0x2E39, 0x2E3C, - 0x2E3D, 0x2E3E, 0x2E3F, 0x2E41, 0x2E43, - 0x2E44, 0x2E45, 0x2E46, 0x2E47, 0x2E48, - 0x2E49, 0x2E4A, 0x2E4B, 0x2E4C, 0x2E4D, - 0x2E4E, 0x2E4F, 0x2E52, 0x2E53, 0x2E54, - 0x3001, 0x3002, 0x3003, 0x303D, 0x30FB, - 0xA4FE, 0xA4FF, 0xA60D, 0xA60E, 0xA60F, - 0xA673, 0xA67E, 0xA6F2, 0xA6F3, 0xA6F4, - 0xA6F5, 0xA6F6, 0xA6F7, 0xA874, 0xA875, - 0xA876, 0xA877, 0xA8CE, 0xA8CF, 0xA8F8, - 0xA8F9, 0xA8FA, 0xA8FC, 0xA92E, 0xA92F, - 0xA95F, 0xA9C1, 0xA9C2, 0xA9C3, 0xA9C4, - 0xA9C5, 0xA9C6, 0xA9C7, 0xA9C8, 0xA9C9, - 0xA9CA, 0xA9CB, 0xA9CC, 0xA9CD, 0xA9DE, - 0xA9DF, 0xAA5C, 0xAA5D, 0xAA5E, 0xAA5F, - 0xAADE, 0xAADF, 0xAAF0, 0xAAF1, 0xABEB, - 0xFE10, 0xFE11, 0xFE12, 0xFE13, 0xFE14, - 0xFE15, 0xFE16, 0xFE19, 0xFE30, 0xFE45, - 0xFE46, 0xFE49, 0xFE4A, 0xFE4B, 0xFE4C, - 0xFE50, 0xFE51, 0xFE52, 0xFE54, 0xFE55, - 0xFE56, 0xFE57, 0xFE5F, 0xFE60, 0xFE61, - 0xFE68, 0xFE6A, 0xFE6B, 0xFF01, 0xFF02, - 0xFF03, 0xFF05, 0xFF06, 0xFF07, 0xFF0A, - 0xFF0C, 0xFF0E, 0xFF0F, 0xFF1A, 0xFF1B, - 0xFF1F, 0xFF20, 0xFF3C, 0xFF61, 0xFF64, - 0xFF65, 0x1010, 0x1010, 0x1010, 0x1039, - 0x103D, 0x1056, 0x1085, 0x1091, 0x1093, - 0x10A5, 0x10A5, 0x10A5, 0x10A5, 0x10A5, - 0x10A5, 0x10A5, 0x10A5, 0x10A5, 0x10A7, - 0x10AF, 0x10AF, 0x10AF, 0x10AF, 0x10AF, - 0x10AF, 0x10AF, 0x10B3, 0x10B3, 0x10B3, - 0x10B3, 0x10B3, 0x10B3, 0x10B3, 0x10B9, - 0x10B9, 0x10B9, 0x10B9, 0x10F5, 0x10F5, - 0x10F5, 0x10F5, 0x10F5, 0x10F8, 0x10F8, - 0x10F8, 0x10F8, 0x1104, 0x1104, 0x1104, - 0x1104, 0x1104, 0x1104, 0x1104, 0x110B, - 0x110B, 0x110B, 0x110B, 0x110C, 0x110C, - 0x1114, 0x1114, 0x1114, 0x1114, 0x1117, - 0x1117, 0x111C, 0x111C, 0x111C, 0x111C, - 0x111C, 0x111D, 0x111D, 0x111D, 0x111D, - 0x1123, 0x1123, 0x1123, 0x1123, 0x1123, - 0x1123, 0x112A, 0x1144, 0x1144, 0x1144, - 0x1144, 0x1144, 0x1145, 0x1145, 0x1145, - 0x114C, 0x115C, 
0x115C, 0x115C, 0x115C, - 0x115C, 0x115C, 0x115C, 0x115C, 0x115C, - 0x115C, 0x115C, 0x115C, 0x115C, 0x115C, - 0x115C, 0x115D, 0x115D, 0x115D, 0x115D, - 0x115D, 0x115D, 0x115D, 0x115D, 0x1164, - 0x1164, 0x1164, 0x1166, 0x1166, 0x1166, - 0x1166, 0x1166, 0x1166, 0x1166, 0x1166, - 0x1166, 0x1166, 0x1166, 0x1166, 0x1166, - 0x116B, 0x1173, 0x1173, 0x1173, 0x1183, - 0x1194, 0x1194, 0x1194, 0x119E, 0x11A3, - 0x11A4, 0x11A4, 0x11A4, 0x11A4, 0x11A4, - 0x11A4, 0x11A4, 0x11A9, 0x11A9, 0x11A9, - 0x11A9, 0x11A9, 0x11AA, 0x11AA, 0x11AA, - 0x11C4, 0x11C4, 0x11C4, 0x11C4, 0x11C4, - 0x11C7, 0x11C7, 0x11EF, 0x11EF, 0x11FF, - 0x1247, 0x1247, 0x1247, 0x1247, 0x1247, - 0x12FF, 0x12FF, 0x16A6, 0x16A6, 0x16AF, - 0x16B3, 0x16B3, 0x16B3, 0x16B3, 0x16B3, - 0x16B4, 0x16E9, 0x16E9, 0x16E9, 0x16E9, - 0x16FE, 0x1BC9, 0x1DA8, 0x1DA8, 0x1DA8, - 0x1DA8, 0x1DA8, 0x1E95, 0x1E95, 0x0028, - 0x005B, 0x007B, 0x0F3A, 0x0F3C, 0x169B, - 0x201A, 0x201E, 0x2045, 0x207D, 0x208D, - 0x2308, 0x230A, 0x2329, 0x2768, 0x276A, - 0x276C, 0x276E, 0x2770, 0x2772, 0x2774, - 0x27C5, 0x27E6, 0x27E8, 0x27EA, 0x27EC, - 0x27EE, 0x2983, 0x2985, 0x2987, 0x2989, - 0x298B, 0x298D, 0x298F, 0x2991, 0x2993, - 0x2995, 0x2997, 0x29D8, 0x29DA, 0x29FC, - 0x2E22, 0x2E24, 0x2E26, 0x2E28, 0x2E42, - 0x2E55, 0x2E57, 0x2E59, 0x2E5B, 0x3008, - 0x300A, 0x300C, 0x300E, 0x3010, 0x3014, - 0x3016, 0x3018, 0x301A, 0x301D, 0xFD3F, - 0xFE17, 0xFE35, 0xFE37, 0xFE39, 0xFE3B, - 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47, - 0xFE59, 0xFE5B, 0xFE5D, 0xFF08, 0xFF3B, - 0xFF5B, 0xFF5F, 0xFF62]) +const NORG_PUNCTUATION = Set( + Char[ + 0x0024, + 0x002B, + 0x003C, + 0x003D, + 0x003E, + 0x005E, + 0x0060, + 0x007C, + 0x007E, + 0x005F, + 0x203F, + 0x2040, + 0x2054, + 0xFE33, + 0xFE34, + 0xFE4D, + 0xFE4E, + 0xFE4F, + 0xFF3F, + 0x002D, + 0x058A, + 0x05BE, + 0x1400, + 0x1806, + 0x2010, + 0x2011, + 0x2012, + 0x2013, + 0x2014, + 0x2015, + 0x2E17, + 0x2E1A, + 0x2E3A, + 0x2E3B, + 0x2E40, + 0x2E5D, + 0x301C, + 0x3030, + 0x30A0, + 0xFE31, + 0xFE32, + 0xFE58, + 0xFE63, + 0xFF0D, + 0x10EAD, + 0x0029, + 0x005D, + 0x007D, + 0x0F3B, + 0x0F3D, + 0x169C, + 0x2046, + 0x207E, + 0x208E, + 0x2309, + 0x230B, + 0x232A, + 0x2769, + 0x276B, + 0x276D, + 0x276F, + 0x2771, + 0x2773, + 0x2775, + 0x27C6, + 0x27E7, + 0x27E9, + 0x27EB, + 0x27ED, + 0x27EF, + 0x2984, + 0x2986, + 0x2988, + 0x298A, + 0x298C, + 0x298E, + 0x2990, + 0x2992, + 0x2994, + 0x2996, + 0x2998, + 0x29D9, + 0x29DB, + 0x29FD, + 0x2E23, + 0x2E25, + 0x2E27, + 0x2E29, + 0x2E56, + 0x2E58, + 0x2E5A, + 0x2E5C, + 0x3009, + 0x300B, + 0x300D, + 0x300F, + 0x3011, + 0x3015, + 0x3017, + 0x3019, + 0x301B, + 0x301E, + 0x301F, + 0xFD3E, + 0xFE18, + 0xFE36, + 0xFE38, + 0xFE3A, + 0xFE3C, + 0xFE3E, + 0xFE40, + 0xFE42, + 0xFE44, + 0xFE48, + 0xFE5A, + 0xFE5C, + 0xFE5E, + 0xFF09, + 0xFF3D, + 0xFF5D, + 0xFF60, + 0xFF63, + 0x00BB, + 0x2019, + 0x201D, + 0x203A, + 0x2E03, + 0x2E05, + 0x2E0A, + 0x2E0D, + 0x2E1D, + 0x2E21, + 0x00AB, + 0x2018, + 0x201B, + 0x201C, + 0x201F, + 0x2039, + 0x2E02, + 0x2E04, + 0x2E09, + 0x2E0C, + 0x2E1C, + 0x2E20, + 0x0021, + 0x0022, + 0x0023, + 0x0025, + 0x0026, + 0x0027, + 0x002A, + 0x002C, + 0x002E, + 0x002F, + 0x003A, + 0x003B, + 0x003F, + 0x0040, + 0x005C, + 0x00A1, + 0x00A7, + 0x00B6, + 0x00B7, + 0x00BF, + 0x037E, + 0x0387, + 0x055A, + 0x055B, + 0x055C, + 0x055D, + 0x055E, + 0x055F, + 0x0589, + 0x05C0, + 0x05C3, + 0x05C6, + 0x05F3, + 0x05F4, + 0x0609, + 0x060A, + 0x060C, + 0x060D, + 0x061B, + 0x061D, + 0x061E, + 0x061F, + 0x066A, + 0x066B, + 0x066C, + 0x066D, + 0x06D4, + 0x0700, + 0x0701, + 0x0702, + 0x0703, + 0x0704, + 0x0705, + 0x0706, + 0x0707, + 
0x0708, + 0x0709, + 0x070A, + 0x070B, + 0x070C, + 0x070D, + 0x07F7, + 0x07F8, + 0x07F9, + 0x0830, + 0x0831, + 0x0832, + 0x0833, + 0x0834, + 0x0835, + 0x0836, + 0x0837, + 0x0838, + 0x0839, + 0x083A, + 0x083B, + 0x083C, + 0x083D, + 0x083E, + 0x085E, + 0x0964, + 0x0965, + 0x0970, + 0x09FD, + 0x0A76, + 0x0AF0, + 0x0C77, + 0x0C84, + 0x0DF4, + 0x0E4F, + 0x0E5A, + 0x0E5B, + 0x0F04, + 0x0F05, + 0x0F06, + 0x0F07, + 0x0F08, + 0x0F09, + 0x0F0A, + 0x0F0B, + 0x0F0C, + 0x0F0D, + 0x0F0E, + 0x0F0F, + 0x0F10, + 0x0F11, + 0x0F12, + 0x0F14, + 0x0F85, + 0x0FD0, + 0x0FD1, + 0x0FD2, + 0x0FD3, + 0x0FD4, + 0x0FD9, + 0x0FDA, + 0x104A, + 0x104B, + 0x104C, + 0x104D, + 0x104E, + 0x104F, + 0x10FB, + 0x1360, + 0x1361, + 0x1362, + 0x1363, + 0x1364, + 0x1365, + 0x1366, + 0x1367, + 0x1368, + 0x166E, + 0x16EB, + 0x16EC, + 0x16ED, + 0x1735, + 0x1736, + 0x17D4, + 0x17D5, + 0x17D6, + 0x17D8, + 0x17D9, + 0x17DA, + 0x1800, + 0x1801, + 0x1802, + 0x1803, + 0x1804, + 0x1805, + 0x1807, + 0x1808, + 0x1809, + 0x180A, + 0x1944, + 0x1945, + 0x1A1E, + 0x1A1F, + 0x1AA0, + 0x1AA1, + 0x1AA2, + 0x1AA3, + 0x1AA4, + 0x1AA5, + 0x1AA6, + 0x1AA8, + 0x1AA9, + 0x1AAA, + 0x1AAB, + 0x1AAC, + 0x1AAD, + 0x1B5A, + 0x1B5B, + 0x1B5C, + 0x1B5D, + 0x1B5E, + 0x1B5F, + 0x1B60, + 0x1B7D, + 0x1B7E, + 0x1BFC, + 0x1BFD, + 0x1BFE, + 0x1BFF, + 0x1C3B, + 0x1C3C, + 0x1C3D, + 0x1C3E, + 0x1C3F, + 0x1C7E, + 0x1C7F, + 0x1CC0, + 0x1CC1, + 0x1CC2, + 0x1CC3, + 0x1CC4, + 0x1CC5, + 0x1CC6, + 0x1CC7, + 0x1CD3, + 0x2016, + 0x2017, + 0x2020, + 0x2021, + 0x2022, + 0x2023, + 0x2024, + 0x2025, + 0x2026, + 0x2027, + 0x2030, + 0x2031, + 0x2032, + 0x2033, + 0x2034, + 0x2035, + 0x2036, + 0x2037, + 0x2038, + 0x203B, + 0x203C, + 0x203D, + 0x203E, + 0x2041, + 0x2042, + 0x2043, + 0x2047, + 0x2048, + 0x2049, + 0x204A, + 0x204B, + 0x204C, + 0x204D, + 0x204E, + 0x204F, + 0x2050, + 0x2051, + 0x2053, + 0x2055, + 0x2056, + 0x2057, + 0x2058, + 0x2059, + 0x205A, + 0x205B, + 0x205C, + 0x205D, + 0x205E, + 0x2CF9, + 0x2CFA, + 0x2CFB, + 0x2CFC, + 0x2CFE, + 0x2CFF, + 0x2D70, + 0x2E00, + 0x2E01, + 0x2E06, + 0x2E07, + 0x2E08, + 0x2E0B, + 0x2E0E, + 0x2E0F, + 0x2E10, + 0x2E11, + 0x2E12, + 0x2E13, + 0x2E14, + 0x2E15, + 0x2E16, + 0x2E18, + 0x2E19, + 0x2E1B, + 0x2E1E, + 0x2E1F, + 0x2E2A, + 0x2E2B, + 0x2E2C, + 0x2E2D, + 0x2E2E, + 0x2E30, + 0x2E31, + 0x2E32, + 0x2E33, + 0x2E34, + 0x2E35, + 0x2E36, + 0x2E37, + 0x2E38, + 0x2E39, + 0x2E3C, + 0x2E3D, + 0x2E3E, + 0x2E3F, + 0x2E41, + 0x2E43, + 0x2E44, + 0x2E45, + 0x2E46, + 0x2E47, + 0x2E48, + 0x2E49, + 0x2E4A, + 0x2E4B, + 0x2E4C, + 0x2E4D, + 0x2E4E, + 0x2E4F, + 0x2E52, + 0x2E53, + 0x2E54, + 0x3001, + 0x3002, + 0x3003, + 0x303D, + 0x30FB, + 0xA4FE, + 0xA4FF, + 0xA60D, + 0xA60E, + 0xA60F, + 0xA673, + 0xA67E, + 0xA6F2, + 0xA6F3, + 0xA6F4, + 0xA6F5, + 0xA6F6, + 0xA6F7, + 0xA874, + 0xA875, + 0xA876, + 0xA877, + 0xA8CE, + 0xA8CF, + 0xA8F8, + 0xA8F9, + 0xA8FA, + 0xA8FC, + 0xA92E, + 0xA92F, + 0xA95F, + 0xA9C1, + 0xA9C2, + 0xA9C3, + 0xA9C4, + 0xA9C5, + 0xA9C6, + 0xA9C7, + 0xA9C8, + 0xA9C9, + 0xA9CA, + 0xA9CB, + 0xA9CC, + 0xA9CD, + 0xA9DE, + 0xA9DF, + 0xAA5C, + 0xAA5D, + 0xAA5E, + 0xAA5F, + 0xAADE, + 0xAADF, + 0xAAF0, + 0xAAF1, + 0xABEB, + 0xFE10, + 0xFE11, + 0xFE12, + 0xFE13, + 0xFE14, + 0xFE15, + 0xFE16, + 0xFE19, + 0xFE30, + 0xFE45, + 0xFE46, + 0xFE49, + 0xFE4A, + 0xFE4B, + 0xFE4C, + 0xFE50, + 0xFE51, + 0xFE52, + 0xFE54, + 0xFE55, + 0xFE56, + 0xFE57, + 0xFE5F, + 0xFE60, + 0xFE61, + 0xFE68, + 0xFE6A, + 0xFE6B, + 0xFF01, + 0xFF02, + 0xFF03, + 0xFF05, + 0xFF06, + 0xFF07, + 0xFF0A, + 0xFF0C, + 0xFF0E, + 0xFF0F, + 0xFF1A, + 0xFF1B, + 0xFF1F, + 0xFF20, + 0xFF3C, + 0xFF61, + 
0xFF64, + 0xFF65, + 0x1010, + 0x1010, + 0x1010, + 0x1039, + 0x103D, + 0x1056, + 0x1085, + 0x1091, + 0x1093, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A5, + 0x10A7, + 0x10AF, + 0x10AF, + 0x10AF, + 0x10AF, + 0x10AF, + 0x10AF, + 0x10AF, + 0x10B3, + 0x10B3, + 0x10B3, + 0x10B3, + 0x10B3, + 0x10B3, + 0x10B3, + 0x10B9, + 0x10B9, + 0x10B9, + 0x10B9, + 0x10F5, + 0x10F5, + 0x10F5, + 0x10F5, + 0x10F5, + 0x10F8, + 0x10F8, + 0x10F8, + 0x10F8, + 0x1104, + 0x1104, + 0x1104, + 0x1104, + 0x1104, + 0x1104, + 0x1104, + 0x110B, + 0x110B, + 0x110B, + 0x110B, + 0x110C, + 0x110C, + 0x1114, + 0x1114, + 0x1114, + 0x1114, + 0x1117, + 0x1117, + 0x111C, + 0x111C, + 0x111C, + 0x111C, + 0x111C, + 0x111D, + 0x111D, + 0x111D, + 0x111D, + 0x1123, + 0x1123, + 0x1123, + 0x1123, + 0x1123, + 0x1123, + 0x112A, + 0x1144, + 0x1144, + 0x1144, + 0x1144, + 0x1144, + 0x1145, + 0x1145, + 0x1145, + 0x114C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115C, + 0x115D, + 0x115D, + 0x115D, + 0x115D, + 0x115D, + 0x115D, + 0x115D, + 0x115D, + 0x1164, + 0x1164, + 0x1164, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x1166, + 0x116B, + 0x1173, + 0x1173, + 0x1173, + 0x1183, + 0x1194, + 0x1194, + 0x1194, + 0x119E, + 0x11A3, + 0x11A4, + 0x11A4, + 0x11A4, + 0x11A4, + 0x11A4, + 0x11A4, + 0x11A4, + 0x11A9, + 0x11A9, + 0x11A9, + 0x11A9, + 0x11A9, + 0x11AA, + 0x11AA, + 0x11AA, + 0x11C4, + 0x11C4, + 0x11C4, + 0x11C4, + 0x11C4, + 0x11C7, + 0x11C7, + 0x11EF, + 0x11EF, + 0x11FF, + 0x1247, + 0x1247, + 0x1247, + 0x1247, + 0x1247, + 0x12FF, + 0x12FF, + 0x16A6, + 0x16A6, + 0x16AF, + 0x16B3, + 0x16B3, + 0x16B3, + 0x16B3, + 0x16B3, + 0x16B4, + 0x16E9, + 0x16E9, + 0x16E9, + 0x16E9, + 0x16FE, + 0x1BC9, + 0x1DA8, + 0x1DA8, + 0x1DA8, + 0x1DA8, + 0x1DA8, + 0x1E95, + 0x1E95, + 0x0028, + 0x005B, + 0x007B, + 0x0F3A, + 0x0F3C, + 0x169B, + 0x201A, + 0x201E, + 0x2045, + 0x207D, + 0x208D, + 0x2308, + 0x230A, + 0x2329, + 0x2768, + 0x276A, + 0x276C, + 0x276E, + 0x2770, + 0x2772, + 0x2774, + 0x27C5, + 0x27E6, + 0x27E8, + 0x27EA, + 0x27EC, + 0x27EE, + 0x2983, + 0x2985, + 0x2987, + 0x2989, + 0x298B, + 0x298D, + 0x298F, + 0x2991, + 0x2993, + 0x2995, + 0x2997, + 0x29D8, + 0x29DA, + 0x29FC, + 0x2E22, + 0x2E24, + 0x2E26, + 0x2E28, + 0x2E42, + 0x2E55, + 0x2E57, + 0x2E59, + 0x2E5B, + 0x3008, + 0x300A, + 0x300C, + 0x300E, + 0x3010, + 0x3014, + 0x3016, + 0x3018, + 0x301A, + 0x301D, + 0xFD3F, + 0xFE17, + 0xFE35, + 0xFE37, + 0xFE39, + 0xFE3B, + 0xFE3D, + 0xFE3F, + 0xFE41, + 0xFE43, + 0xFE47, + 0xFE59, + 0xFE5B, + 0xFE5D, + 0xFF08, + 0xFF3B, + 0xFF5B, + 0xFF5F, + 0xFF62, + ], +) diff --git a/src/assets/norg_whitespace.jl b/src/assets/norg_whitespace.jl index 8ac270f..2fc07b5 100644 --- a/src/assets/norg_whitespace.jl +++ b/src/assets/norg_whitespace.jl @@ -1,21 +1,25 @@ -const NORG_WHITESPACES = Set(Char[0x0009, # tab - # 0x000A, # line feed - 0x000C, # form feed - 0x000D, # carriage return - 0x0020, # space - 0x00A0, # no-break space - 0x1680, # Ogham space mark - 0x2000, # en quad - 0x2001, # em quad - 0x2002, # en space - 0x2003, # em space - 0x2004, # three-per-em space - 0x2005, # four-per-em space - 0x2006, # six-per-em space - 0x2007, # figure space - 0x2008, # punctuation space - 0x2009, # thin space - 0x200A, # hair space - 0x202F, # narrow no-break space - 0x205F, # medium mathematical space - 0x3000]) +const NORG_WHITESPACES = Set( + Char[ + 0x0009, # tab + # 
0x000A, # line feed + 0x000C, # form feed + 0x000D, # carriage return + 0x0020, # space + 0x00A0, # no-break space + 0x1680, # Ogham space mark + 0x2000, # en quad + 0x2001, # em quad + 0x2002, # en space + 0x2003, # em space + 0x2004, # three-per-em space + 0x2005, # four-per-em space + 0x2006, # six-per-em space + 0x2007, # figure space + 0x2008, # punctuation space + 0x2009, # thin space + 0x200A, # hair space + 0x202F, # narrow no-break space + 0x205F, # medium mathematical space + 0x3000, + ], +) diff --git a/src/ast.jl b/src/ast.jl index d049368..9a27688 100644 --- a/src/ast.jl +++ b/src/ast.jl @@ -18,7 +18,7 @@ struct Node start::Int stop::Int end -Node(kind::Kind)= Node(kind, Node[], 1, 1) +Node(kind::Kind) = Node(kind, Node[], 1, 1) """ Stores the Abstract Syntax Tree (AST) for a Norg document. It implements the @@ -27,9 +27,11 @@ Stores the Abstract Syntax Tree (AST) for a Norg document. It implements the struct NorgDocument root::Node tokens::Vector{Token} - targets::Dict{String, Tuple{Kind,Ref{Node}}} + targets::Dict{String,Tuple{Kind,Ref{Node}}} +end +function NorgDocument(root, tokens) + return NorgDocument(root, tokens, Dict{String,Tuple{Kind,Ref{Node}}}()) end -NorgDocument(root, tokens) = NorgDocument(root, tokens, Dict{String, Tuple{Kind,Ref{Node}}}()) Kinds.kind(::NorgDocument) = K"NorgDocument" Kinds.kind(node::Node) = node.kind @@ -70,10 +72,17 @@ Kinds.is_heading(node::Node) = is_heading(kind(node)) Kinds.is_unordered_list(node::Node) = is_unordered_list(kind(node)) Kinds.is_ordered_list(node::Node) = is_ordered_list(kind(node)) Kinds.is_quote(node::Node) = is_quote(kind(node)) -is_first_class_node(k::Kind) = k ∈ [K"Paragraph", K"Verbatim"] || is_detached_modifier(k) || is_nestable(k) || is_heading(k) +function is_first_class_node(k::Kind) + return k ∈ [K"Paragraph", K"Verbatim"] || + is_detached_modifier(k) || + is_nestable(k) || + is_heading(k) +end is_first_class_node(node::Node) = is_first_class_node(kind(node)) -litteral(ast::NorgDocument, node::Node) = join(map(value, ast.tokens[start(node):stop(node)])) +function litteral(ast::NorgDocument, node::Node) + return join(map(value, ast.tokens[start(node):stop(node)])) +end heading_level(node::Node) = heading_level(kind(node)) function heading_level(k::Kind) @@ -109,7 +118,7 @@ function heading_kind(level::Int) K"Heading4" elseif level == 5 K"Heading5" - else + else K"Heading6" end end @@ -148,7 +157,7 @@ function unordered_list_level(level::Int) K"UnorderedList4" elseif level == 5 K"UnorderedList5" - else + else K"UnorderedList6" end end @@ -187,7 +196,7 @@ function ordered_list_level(level::Int) K"OrderedList4" elseif level == 5 K"OrderedList5" - else + else K"OrderedList6" end end @@ -226,7 +235,7 @@ function quote_level(level::Int) K"Quote4" elseif level == 5 K"Quote5" - else + else K"Quote6" end end @@ -245,6 +254,15 @@ function nestable_level(k::Kind) end end -export is_first_class_node, heading_kind, heading_level, unordered_list_level, ordered_list_level, quote_level, nestable_level, litteral, NorgDocument, Node +export is_first_class_node, + heading_kind, + heading_level, + unordered_list_level, + ordered_list_level, + quote_level, + nestable_level, + litteral, + NorgDocument, + Node end diff --git a/src/codegen.jl b/src/codegen.jl index 965cd7d..195407e 100644 --- a/src/codegen.jl +++ b/src/codegen.jl @@ -23,11 +23,11 @@ abstract type CodegenTarget end Do code generation for a given [`AST.NorgDocument`](@ref) to a given target. 
""" function codegen end -codegen(t::Type{T}, ast::AST.NorgDocument) where {T <: CodegenTarget} = codegen(t(), ast) +codegen(t::Type{T}, ast::AST.NorgDocument) where {T<:CodegenTarget} = codegen(t(), ast) -function codegen(t::T, ast::AST.NorgDocument, node::AST.Node) where {T <: CodegenTarget} +function codegen(t::T, ast::AST.NorgDocument, node::AST.Node) where {T<:CodegenTarget} if kind(node) == K"Paragraph" - codegen(t, Paragraph(), ast, node) + codegen(t, Paragraph(), ast, node) elseif kind(node) == K"ParagraphSegment" codegen(t, ParagraphSegment(), ast, node) elseif kind(node) == K"Bold" @@ -124,7 +124,8 @@ function codegen(t::T, ast::AST.NorgDocument, node::AST.Node) where {T <: Codege codegen(t, StandardRangedTag(), ast, node) elseif kind(node) == K"TodoExtension" codegen(t, TodoExtension(), ast, node) - elseif kind(node) ∈ KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" + elseif kind(node) ∈ + KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" codegen(t, Word(), ast, node) elseif kind(node) == K"WeakCarryoverTag" codegen(t, WeakCarryoverTag(), ast, node) @@ -143,12 +144,10 @@ function codegen(t::T, ast::AST.NorgDocument, node::AST.Node) where {T <: Codege t_stop = ast.tokens[AST.stop(node)] error("""$T codegen got an unhandled node type: $(kind(node)). Faulty node starts at line $(line(t_start)), col. $(char(t_start)) - and stops at line $(line(t_stop)), col. $(char(t_stop)).""" - ) + and stops at line $(line(t_stop)), col. $(char(t_stop)).""") end end - include("codegen/html.jl") include("codegen/json.jl") using .HTMLCodegen diff --git a/src/codegen/html.jl b/src/codegen/html.jl index 52754b2..a4e87e2 100644 --- a/src/codegen/html.jl +++ b/src/codegen/html.jl @@ -22,20 +22,20 @@ heading level `i` by setting `HiFootnotes` or at the root of the document or directly as it appears in the Norg document. """ @enum FootnotesLevel begin - RootFootnotes=0 - H1Footnotes=1 - H2Footnotes=2 - H3Footnotes=3 - H4Footnotes=4 - H5Footnotes=5 - H6Footnotes=6 - InplaceFootnotes=7 + RootFootnotes = 0 + H1Footnotes = 1 + H2Footnotes = 2 + H3Footnotes = 3 + H4Footnotes = 4 + H5Footnotes = 5 + H6Footnotes = 6 + InplaceFootnotes = 7 end """ HTML target to feed [`codegen`](@ref). """ -struct HTMLTarget <: CodegenTarget +struct HTMLTarget <: CodegenTarget footnotes_level::FootnotesLevel end @@ -46,7 +46,7 @@ A special target for link location, this ensure type-stability. """ struct HTMLLocationTarget <: CodegenTarget end -function do_footnote_item(ast, item) +function do_footnote_item(ast, item) term, note... 
= children(item) term_id = "fn_" * idify(textify(ast, term)) backref = "#fnref_" * idify(textify(ast, term)) @@ -64,7 +64,9 @@ function codegen(t::HTMLTarget, ast::NorgDocument) footnotes = getchildren(ast.root, K"Footnote") items = Iterators.flatten(children.(footnotes)) else # collect all orphan footnotes - footnotes = getchildren(ast.root, K"Footnote", AST.heading_kind(Int(t.footnotes_level))) + footnotes = getchildren( + ast.root, K"Footnote", AST.heading_kind(Int(t.footnotes_level)) + ) items = Iterators.flatten(children.(footnotes)) end footnotes_node = @htl """ @@ -98,25 +100,27 @@ function codegen(t::HTMLTarget, ::ParagraphSegment, ast::NorgDocument, node::Nod @htl "$res" end -html_node(::Union{FreeFormBold, Bold}) = "b" -html_node(::Union{FreeFormItalic, Italic}) = "i" -html_node(::Union{FreeFormUnderline, Underline}) = "ins" -html_node(::Union{FreeFormStrikethrough, Strikethrough}) = "del" -html_node(::Union{FreeFormSpoiler, Spoiler}) = "span" -html_node(::Union{FreeFormSuperscript, Superscript}) = "sup" -html_node(::Union{FreeFormSubscript, Subscript}) = "sub" -html_node(::Union{FreeFormInlineCode, InlineCode}) = "code" - -html_class(::Union{FreeFormBold, Bold}) = [] -html_class(::Union{FreeFormItalic, Italic}) = [] -html_class(::Union{FreeFormUnderline, Underline}) = [] -html_class(::Union{FreeFormStrikethrough, Strikethrough}) = [] -html_class(::Union{FreeFormSpoiler, Spoiler}) = ["spoiler"] -html_class(::Union{FreeFormSuperscript, Superscript}) = [] -html_class(::Union{FreeFormSubscript, Subscript}) = [] -html_class(::Union{FreeFormInlineCode, InlineCode}) = [] - -function codegen(t::HTMLTarget, s::T, ast::NorgDocument, node::Node) where {T<:AttachedModifierStrategy} +html_node(::Union{FreeFormBold,Bold}) = "b" +html_node(::Union{FreeFormItalic,Italic}) = "i" +html_node(::Union{FreeFormUnderline,Underline}) = "ins" +html_node(::Union{FreeFormStrikethrough,Strikethrough}) = "del" +html_node(::Union{FreeFormSpoiler,Spoiler}) = "span" +html_node(::Union{FreeFormSuperscript,Superscript}) = "sup" +html_node(::Union{FreeFormSubscript,Subscript}) = "sub" +html_node(::Union{FreeFormInlineCode,InlineCode}) = "code" + +html_class(::Union{FreeFormBold,Bold}) = [] +html_class(::Union{FreeFormItalic,Italic}) = [] +html_class(::Union{FreeFormUnderline,Underline}) = [] +html_class(::Union{FreeFormStrikethrough,Strikethrough}) = [] +html_class(::Union{FreeFormSpoiler,Spoiler}) = ["spoiler"] +html_class(::Union{FreeFormSuperscript,Superscript}) = [] +html_class(::Union{FreeFormSubscript,Subscript}) = [] +html_class(::Union{FreeFormInlineCode,InlineCode}) = [] + +function codegen( + t::HTMLTarget, s::T, ast::NorgDocument, node::Node +) where {T<:AttachedModifierStrategy} res = HTR[] for c in children(node) push!(res, codegen(t, ast, c)) @@ -129,11 +133,15 @@ function codegen(t::HTMLTarget, s::T, ast::NorgDocument, node::Node) where {T<:A end end -function codegen(t::HTMLTarget, ::Union{NullModifier, FreeFormNullModifier}, ast::NorgDocument, node::Node) +function codegen( + t::HTMLTarget, ::Union{NullModifier,FreeFormNullModifier}, ast::NorgDocument, node::Node +) @htl "" end -function codegen(t::HTMLTarget, ::Union{InlineMath, FreeFormInlineMath}, ast::NorgDocument, node::Node) +function codegen( + t::HTMLTarget, ::Union{InlineMath,FreeFormInlineMath}, ast::NorgDocument, node::Node +) res = HTR[] for c in children(node) push!(res, codegen(t, ast, c)) @@ -141,7 +149,9 @@ function codegen(t::HTMLTarget, ::Union{InlineMath, FreeFormInlineMath}, ast::No @htl "\$$res\$" end -function 
codegen(t::HTMLTarget, ::Union{Variable, FreeFormVariable}, ast::NorgDocument, node::Node) +function codegen( + t::HTMLTarget, ::Union{Variable,FreeFormVariable}, ast::NorgDocument, node::Node +) @htl "" end @@ -205,15 +215,17 @@ function codegen(t::HTMLTarget, ::Link, ast::NorgDocument, node::Node) end function codegen(t::HTMLLocationTarget, ::URLLocation, ast::NorgDocument, node::Node) - textify(ast, first(children(node))) + return textify(ast, first(children(node))) end function codegen(t::HTMLLocationTarget, ::LineNumberLocation, ast::NorgDocument, node::Node) # Who are you, people who link to line location ? - "#l-$(textify(ast, first(children(node))))" + return "#l-$(textify(ast, first(children(node))))" end -function codegen(t::HTMLLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) +function codegen( + t::HTMLLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node +) kindoftarget = kind(first(children(node))) title_node = last(children(node)) title = textify(ast, title_node) @@ -226,7 +238,9 @@ function codegen(t::HTMLLocationTarget, ::DetachedModifierLocation, ast::NorgDoc elseif kindoftarget == K"Footnote" "#" * "fn_" * idify(title) else - error("HTML code generation received an unknown Detached Modifier location: $kindoftarget") + error( + "HTML code generation received an unknown Detached Modifier location: $kindoftarget", + ) end end @@ -244,56 +258,58 @@ function codegen(t::HTMLLocationTarget, ::MagicLocation, ast::NorgDocument, node elseif kindoftarget == K"Footnote" "#" * "fn_" * idify(title) else - error("HTML code generation received an unknown Detached Modifier location: $kindoftarget") + error( + "HTML code generation received an unknown Detached Modifier location: $kindoftarget", + ) end else - "" + "" end end function codegen(t::HTMLLocationTarget, ::FileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" - start = "/" + start = "/" else - start = "" + start = "" end target_loc = textify(ast, target) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget)::String end - - start * target_loc * subtarget_loc + + return start * target_loc * subtarget_loc end function codegen(t::HTMLLocationTarget, ::NorgFileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" - start = "/" + start = "/" else - start = "" + start = "" end target_loc = textify(ast, target) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget)::String end - - start * target_loc * subtarget_loc + + return start * target_loc * subtarget_loc end function codegen(t::HTMLLocationTarget, ::WikiLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) target_loc = textify(ast, target) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget)::String end - "/" * target_loc * subtarget_loc + return "/" * target_loc * subtarget_loc end function codegen(t::HTMLLocationTarget, ::TimestampLocation, ast::NorgDocument, node::Node) @@ -306,7 +322,7 @@ function codegen(t::HTMLLocationTarget, ::TimestampLocation, ast::NorgDocument, end res else - "" + "" end end @@ -425,12 +441,14 @@ function codegen(t::HTMLTarget, ::StandardRangedTag, ast::NorgDocument, node::No
        $([codegen(t, ast, c) for c in children(last(others))])
        - """ + """ else - @warn "Unknown standard ranged tag." tag_litteral ast.tokens[AST.start(node)] ast.tokens[AST.stop(node)] + @warn "Unknown standard ranged tag." tag_litteral ast.tokens[AST.start(node)] ast.tokens[AST.stop( + node + )] @htl """ $([codegen(t, ast, c) for c in children(last(others))]) - """ + """ end end @@ -469,7 +487,12 @@ function codegen(::HTMLTarget, ::TodoExtension, ast::NorgDocument, node::Node) end end -function codegen(t::HTMLTarget, ::Union{WeakCarryoverTag, StrongCarryoverTag}, ast::NorgDocument, node::Node) +function codegen( + t::HTMLTarget, + ::Union{WeakCarryoverTag,StrongCarryoverTag}, + ast::NorgDocument, + node::Node, +) content = codegen(t, ast, last(children(node))) params = Dict{Symbol,String}() if length(children(node)) <= 2 @@ -479,7 +502,7 @@ function codegen(t::HTMLTarget, ::Union{WeakCarryoverTag, StrongCarryoverTag}, a param = textify(ast, children(node)[2]) params[Symbol(label)] = param else - class = join(textify.(Ref(ast), children(node)[begin:end-1]), "-") + class = join(textify.(Ref(ast), children(node)[begin:(end - 1)]), "-") params[:class] = class end @htl "
        $content
        " @@ -487,14 +510,15 @@ end function codegen(t::HTMLTarget, ::Definition, ast::NorgDocument, node::Node) items = children(node) - content = Iterators.flatten(map(items) do item + content = collect(Iterators.flatten( + map(items) do item term, def... = children(item) term_id = "def_" * idify(textify(ast, term)) term_node = @htl "
        $(codegen(t, ast, term))
        " def_node = @htl "
        $(codegen.(Ref(t), Ref(ast), def))
        " - term_node,def_node - end - ) |> collect + term_node, def_node + end, + )) @htl "
        $content
        " end diff --git a/src/codegen/json.jl b/src/codegen/json.jl index 04d9e11..8d99191 100644 --- a/src/codegen/json.jl +++ b/src/codegen/json.jl @@ -18,11 +18,11 @@ import ..codegen import ..textify import ..idify -jsonify(p::Pair{Symbol, Symbol}) = "$(jsonify(first(p))):$(jsonify(last(p)))" -jsonify(p::Pair{Symbol, Int}) = "$(jsonify(first(p))):$(jsonify(last(p)))" -jsonify(p::Pair{Symbol, String}) = "$(jsonify(first(p))):$(jsonify(last(p)))" -jsonify(a::Vector{Pair{Symbol, String}}) = "{"*join(jsonify.(a)::Vector{String}, ",")*"}" -jsonify(a::Vector) = "["*join(jsonify.(a), ",")*"]" +jsonify(p::Pair{Symbol,Symbol}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(p::Pair{Symbol,Int}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(p::Pair{Symbol,String}) = "$(jsonify(first(p))):$(jsonify(last(p)))" +jsonify(a::Vector{Pair{Symbol,String}}) = "{" * join(jsonify.(a)::Vector{String}, ",") * "}" +jsonify(a::Vector) = "[" * join(jsonify.(a), ",") * "]" jsonify(x::String) = x jsonify(x::Int) = string(x) jsonify(x::Symbol) = "\"$(x)\"" @@ -33,7 +33,7 @@ JSON target to feed [`codegen`](@ref). You can specify a pandoc api version, but this only changes the version number announced in the generated output. """ -struct JSONTarget <: CodegenTarget +struct JSONTarget <: CodegenTarget pandocapiversion::Vector{Int} end JSONTarget() = JSONTarget([1, 23]) @@ -42,7 +42,11 @@ JSONTarget() = JSONTarget([1, 23]) A special target for link location, this ensure type-stability. """ struct JSONLocationTarget <: CodegenTarget end -codegen(::JSONLocationTarget, _, _, _) = error("Trying to generate a non location node with target `JSONLocationTarget`. You found a bug in JSON code generation.") +function codegen(::JSONLocationTarget, _, _, _) + return error( + "Trying to generate a non location node with target `JSONLocationTarget`. 
You found a bug in JSON code generation.", + ) +end function codegen_children(t::JSONTarget, ast::AST.NorgDocument, node::Node) res = String[] @@ -52,15 +56,17 @@ function codegen_children(t::JSONTarget, ast::AST.NorgDocument, node::Node) push!(res, r) end end - res + return res end function codegen(t::JSONTarget, ast::AST.NorgDocument) - jsonify([ - Symbol("pandoc-api-version") => jsonify(t.pandocapiversion) - :meta => "{}" - :blocks => jsonify(codegen_children(t, ast, ast.root)) - ]) + return jsonify( + [ + Symbol("pandoc-api-version") => jsonify(t.pandocapiversion) + :meta => "{}" + :blocks => jsonify(codegen_children(t, ast, ast.root)) + ], + ) end function codegen(t::JSONTarget, ::Paragraph, ast::NorgDocument, node::Node) @@ -75,33 +81,39 @@ function codegen(t::JSONTarget, ::Paragraph, ast::NorgDocument, node::Node) if !isempty(res) pop!(res) # remove last softbreak end - jsonify([ + return jsonify([ :t => jsonify(:Para) :c => jsonify(res) ]) end -codegen(t::JSONTarget, ::ParagraphSegment, ast::NorgDocument, node::Node) = join(codegen_children(t, ast, node), ",") +function codegen(t::JSONTarget, ::ParagraphSegment, ast::NorgDocument, node::Node) + return join(codegen_children(t, ast, node), ",") +end -pandoc_t(::Union{FreeFormBold, Bold}) = :Strong -pandoc_t(::Union{FreeFormItalic, Italic}) = :Emph -pandoc_t(::Union{FreeFormUnderline, Underline}) = :Underline -pandoc_t(::Union{FreeFormStrikethrough, Strikethrough}) = :Strikeout -pandoc_t(::Union{FreeFormSpoiler, Spoiler}) = :Span -pandoc_t(::Union{FreeFormSuperscript, Superscript}) = :Superscript -pandoc_t(::Union{FreeFormSubscript, Subscript}) = :Subscript -pandoc_t(::Union{FreeFormInlineCode, InlineCode}) = :Code +pandoc_t(::Union{FreeFormBold,Bold}) = :Strong +pandoc_t(::Union{FreeFormItalic,Italic}) = :Emph +pandoc_t(::Union{FreeFormUnderline,Underline}) = :Underline +pandoc_t(::Union{FreeFormStrikethrough,Strikethrough}) = :Strikeout +pandoc_t(::Union{FreeFormSpoiler,Spoiler}) = :Span +pandoc_t(::Union{FreeFormSuperscript,Superscript}) = :Superscript +pandoc_t(::Union{FreeFormSubscript,Subscript}) = :Subscript +pandoc_t(::Union{FreeFormInlineCode,InlineCode}) = :Code -pandoc_attr(::Union{FreeFormBold, Bold}) = [] -pandoc_attr(::Union{FreeFormItalic, Italic}) = [] -pandoc_attr(::Union{FreeFormUnderline, Underline}) = [] -pandoc_attr(::Union{FreeFormStrikethrough, Strikethrough}) = [] -pandoc_attr(::Union{FreeFormSpoiler, Spoiler}) = ["\"\"", jsonify(["\"spoiler\""]), jsonify([])] -pandoc_attr(::Union{FreeFormSuperscript, Superscript}) = [] -pandoc_attr(::Union{FreeFormSubscript, Subscript}) = [] -pandoc_attr(::Union{FreeFormInlineCode, InlineCode}) = ["\"\"", jsonify([]), jsonify([])] +pandoc_attr(::Union{FreeFormBold,Bold}) = [] +pandoc_attr(::Union{FreeFormItalic,Italic}) = [] +pandoc_attr(::Union{FreeFormUnderline,Underline}) = [] +pandoc_attr(::Union{FreeFormStrikethrough,Strikethrough}) = [] +function pandoc_attr(::Union{FreeFormSpoiler,Spoiler}) + return ["\"\"", jsonify(["\"spoiler\""]), jsonify([])] +end +pandoc_attr(::Union{FreeFormSuperscript,Superscript}) = [] +pandoc_attr(::Union{FreeFormSubscript,Subscript}) = [] +pandoc_attr(::Union{FreeFormInlineCode,InlineCode}) = ["\"\"", jsonify([]), jsonify([])] -function codegen(t::JSONTarget, s::T, ast::NorgDocument, node::Node) where {T<:AttachedModifierStrategy} +function codegen( + t::JSONTarget, s::T, ast::NorgDocument, node::Node +) where {T<:AttachedModifierStrategy} res = String[] for c in children(node) r = codegen(t, ast, c) @@ -118,50 +130,63 @@ function 
codegen(t::JSONTarget, s::T, ast::NorgDocument, node::Node) where {T<:A ]) else jsonify([ - :t => jsonify(pandoc_t(s)) + :t => jsonify(pandoc_t(s)) :c => jsonify([jsonify(attr), jsonify(res)]) ]) end end -function codegen(::JSONTarget, ::Union{InlineMath, FreeFormInlineMath}, ast::NorgDocument, node::Node) - jsonify([ - :t=>jsonify(:Math) - :c => jsonify([jsonify([:t=>jsonify(:InlineMath)]), "\""*textify(ast, node, escape_string)*"\""]) - ]) +function codegen( + ::JSONTarget, ::Union{InlineMath,FreeFormInlineMath}, ast::NorgDocument, node::Node +) + return jsonify( + [ + :t => jsonify(:Math) + :c => jsonify([ + jsonify([:t => jsonify(:InlineMath)]), + "\"" * textify(ast, node, escape_string) * "\"", + ]) + ], + ) end -function codegen(::JSONTarget, ::Union{Variable, FreeFormVariable}, ::NorgDocument, ::Node) - "" +function codegen(::JSONTarget, ::Union{Variable,FreeFormVariable}, ::NorgDocument, ::Node) + return "" end -function codegen(::JSONTarget, ::Union{NullModifier, FreeFormNullModifier}, ::NorgDocument, ::Node) - "" +function codegen( + ::JSONTarget, ::Union{NullModifier,FreeFormNullModifier}, ::NorgDocument, ::Node +) + return "" end -function codegen(::JSONTarget, s::Union{InlineCode, FreeFormInlineCode}, ast::NorgDocument, node::Node) - jsonify([ - :t=> jsonify(pandoc_t(s)) - :c => jsonify([jsonify(pandoc_attr(s)), "\""*textify(ast, node, escape_string)*"\""]) - ]) +function codegen( + ::JSONTarget, s::Union{InlineCode,FreeFormInlineCode}, ast::NorgDocument, node::Node +) + return jsonify( + [ + :t => jsonify(pandoc_t(s)) + :c => jsonify([ + jsonify(pandoc_attr(s)), "\"" * textify(ast, node, escape_string) * "\"" + ]) + ], + ) end function codegen(t::JSONTarget, ::Word, ast::NorgDocument, node::Node) if is_leaf(node) && (AST.stop(node) - AST.start(node) > 0) jsonify([ - :t=>jsonify(:Str) - :c=>"\"$(textify(ast, node, escape_string))\"" + :t => jsonify(:Str) + :c => "\"$(textify(ast, node, escape_string))\"" ]) elseif is_leaf(node) token = first(ast.tokens[AST.start(node):AST.stop(node)]) if Tokens.is_whitespace(token) - jsonify([ - :t=>jsonify(:Space) - ]) + jsonify([:t => jsonify(:Space)]) else jsonify([ - :t=>jsonify(:Str) - :c=>"\"$(textify(ast, node, escape_string))\"" + :t => jsonify(:Str) + :c => "\"$(textify(ast, node, escape_string))\"" ]) end else @@ -180,42 +205,58 @@ function codegen(t::JSONTarget, ::Link, ast::NorgDocument, node::Node) elseif kind(first(node.children)) == K"WikiLocation" text = jsonify([codegen(t, ast, children(first(children(node)))[1])]) elseif kind(first(node.children)) == K"TimestampLocation" - text = "\""*textify(ast, first(node.children), escape_string)*"\"" + text = "\"" * textify(ast, first(node.children), escape_string) * "\"" else - text = jsonify([jsonify([:t=>jsonify(:Str), :c=>"\""*codegen(JSONLocationTarget(), ast, first(node.children))*"\""])]) + text = jsonify([ + jsonify([ + :t => jsonify(:Str), + :c => + "\"" * codegen(JSONLocationTarget(), ast, first(node.children)) * "\"", + ]), + ]) end if kind(first(node.children)) == K"TimestampLocation" jsonify([ - :t=>jsonify(:Str) - :c=>text - ]) + :t => jsonify(:Str) + :c => text + ]) else target = codegen(JSONLocationTarget(), ast, first(node.children)) - jsonify([ - :t=>jsonify(:Link) - :c=>jsonify([ - jsonify([Symbol(""), jsonify(String[]), jsonify(String[])]), - text, - jsonify(["\""*target*"\"", Symbol("")]) + jsonify( + [ + :t => jsonify(:Link) + :c => jsonify([ + jsonify([Symbol(""), jsonify(String[]), jsonify(String[])]), + text, + jsonify(["\"" * target * "\"", Symbol("")]), ]) - ]) 
+ ], + ) end end # fallback -codegen(::JSONTarget, ::URLLocation, ast, node) = error("You found a bug in URL location JSON code generation.") +function codegen(::JSONTarget, ::URLLocation, ast, node) + return error("You found a bug in URL location JSON code generation.") +end codegen(::JSONLocationTarget, ::URLLocation, ast, node) = textify(ast, node, escape_string) # fallback -codegen(::JSONTarget, ::LineNumberLocation, ast, node) = error("You found a bug in line number location JSON code generation.") +function codegen(::JSONTarget, ::LineNumberLocation, ast, node) + return error("You found a bug in line number location JSON code generation.") +end function codegen(::JSONLocationTarget, ::LineNumberLocation, ast::NorgDocument, node::Node) # Who are you, people who link to line location ? - "#l-$(textify(ast, node, escape_string))" + return "#l-$(textify(ast, node, escape_string))" end # fallback -codegen(::JSONTarget, ::DetachedModifierLocation, ast, node) = error("You found a bug in detached modifier location JSON code generation.") -function codegen(::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node) +function codegen(::JSONTarget, ::DetachedModifierLocation, ast, node) + return error("You found a bug in detached modifier location JSON code generation.") +end +function codegen( + ::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocument, node::Node +) kindoftarget = kind(first(children(node))) title = textify(ast, last(children(node)), escape_string) if AST.is_heading(kindoftarget) @@ -227,17 +268,23 @@ function codegen(::JSONLocationTarget, ::DetachedModifierLocation, ast::NorgDocu elseif kindoftarget == K"Footnote" "#" * "fn_" * idify(title) else - error("JSON code generation received an unknown Detached Modifier location: $kindoftarget") + error( + "JSON code generation received an unknown Detached Modifier location: $kindoftarget", + ) end end # fallback -codegen(::JSONTarget, ::MagicLocation, ast, node) = error("You found a bug in magic location JSON code generation.") +function codegen(::JSONTarget, ::MagicLocation, ast, node) + return error("You found a bug in magic location JSON code generation.") +end function codegen(::JSONLocationTarget, ::MagicLocation, ast::NorgDocument, node::Node) key = textify(ast, node, escape_string) if haskey(ast.targets, key) - kindoftarget, targetnoderef = ast.targets[key]::Tuple{Kind, Ref{Node}} - title = textify(ast, first(children(targetnoderef[]::Node)::Vector{Node}), escape_string) + kindoftarget, targetnoderef = ast.targets[key]::Tuple{Kind,Ref{Node}} + title = textify( + ast, first(children(targetnoderef[]::Node)::Vector{Node}), escape_string + ) if AST.is_heading(kindoftarget) level_num = AST.heading_level(kindoftarget) level = "h" * string(level_num) @@ -247,69 +294,83 @@ function codegen(::JSONLocationTarget, ::MagicLocation, ast::NorgDocument, node: elseif kindoftarget == K"Footnote" "#" * "fn_" * idify(title) else - error("JSON code generation received an unknown Detached Modifier location: $kindoftarget") + error( + "JSON code generation received an unknown Detached Modifier location: $kindoftarget", + ) end else - "" + "" end end # fallback -codegen(::JSONTarget, ::FileLocation, ast, node) = error("You found a bug in file location JSON code generation.") +function codegen(::JSONTarget, ::FileLocation, ast, node) + return error("You found a bug in file location JSON code generation.") +end function codegen(t::JSONLocationTarget, ::FileLocation, ast::NorgDocument, node::Node) target, subtarget = 
children(node) if kind(target) == K"FileNorgRootTarget" - start = "/" + start = "/" else - start = "" + start = "" end target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget) end - - start * target_loc * subtarget_loc + + return start * target_loc * subtarget_loc end # fallback -codegen(::JSONTarget, ::NorgFileLocation, ast, node) = error("You found a bug in norg file location JSON code generation.") +function codegen(::JSONTarget, ::NorgFileLocation, ast, node) + return error("You found a bug in norg file location JSON code generation.") +end function codegen(t::JSONLocationTarget, ::NorgFileLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) if kind(target) == K"FileNorgRootTarget" - start = "/" + start = "/" else - start = "" + start = "" end target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget) end - - start * target_loc * subtarget_loc + + return start * target_loc * subtarget_loc end # fallback -codegen(::JSONTarget, ::WikiLocation, ast, node) = error("You found a bug in wiki location JSON code generation.") +function codegen(::JSONTarget, ::WikiLocation, ast, node) + return error("You found a bug in wiki location JSON code generation.") +end function codegen(t::JSONLocationTarget, ::WikiLocation, ast::NorgDocument, node::Node) target, subtarget = children(node) target_loc = textify(ast, target, escape_string) if kind(subtarget) == K"None" - subtarget_loc = "" + subtarget_loc = "" else subtarget_loc = "#" * codegen(t, ast, subtarget) end - "/" * target_loc * subtarget_loc + return "/" * target_loc * subtarget_loc end # fallback -codegen(::JSONTarget, ::TimestampLocation, ast, node) = error("You found a bug in timestamp location JSON code generation.") -codegen(::JSONLocationTarget, ::TimestampLocation, ast::NorgDocument, node::Node) = textify(ast, node, escape_string) +function codegen(::JSONTarget, ::TimestampLocation, ast, node) + return error("You found a bug in timestamp location JSON code generation.") +end +function codegen(::JSONLocationTarget, ::TimestampLocation, ast::NorgDocument, node::Node) + return textify(ast, node, escape_string) +end -codegen(t::JSONTarget, ::LinkDescription, ast::NorgDocument, node::Node) = jsonify(codegen_children(t, ast, node)) +function codegen(t::JSONTarget, ::LinkDescription, ast::NorgDocument, node::Node) + return jsonify(codegen_children(t, ast, node)) +end function codegen(t::JSONTarget, ::Anchor, ast::NorgDocument, node::Node) text = codegen(t, ast, first(node.children)) @@ -318,14 +379,16 @@ function codegen(t::JSONTarget, ::Anchor, ast::NorgDocument, node::Node) else target = codegen(JSONLocationTarget(), ast, last(children(node))) end - jsonify([ - :t=>jsonify(:Link) - :c=>jsonify([ - jsonify([Symbol(""), jsonify([]), jsonify([])]), - text, - jsonify(["\""*target*"\"", Symbol("")]) - ]) - ]) + return jsonify( + [ + :t => jsonify(:Link) + :c => jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + text, + jsonify(["\"" * target * "\"", Symbol("")]), + ]) + ], + ) end function codegen(t::JSONTarget, ::InlineLinkTarget, ast::NorgDocument, node::Node) @@ -341,13 +404,12 @@ function codegen(t::JSONTarget, ::InlineLinkTarget, ast::NorgDocument, node::Nod pop!(text) # remove last space end id = idify(join(textify(ast, node, escape_string))) - jsonify([ - :t=>jsonify(:Span) - 
:c=>jsonify([ - jsonify(["\""*id*"\"", jsonify([]), jsonify(Any[])]), - text - ]) - ]) + return jsonify( + [ + :t => jsonify(:Span) + :c => jsonify([jsonify(["\"" * id * "\"", jsonify([]), jsonify(Any[])]), text]) + ], + ) end function codegen(t::JSONTarget, ::Heading, ast::NorgDocument, node::Node) @@ -362,20 +424,28 @@ function codegen(t::JSONTarget, ::Heading, ast::NorgDocument, node::Node) else heading_title, heading_content... = codegen_children(t, ast, node) end - title = "\""*textify(ast, heading_title_node, escape_string)*"\"" - id_title = "\""*idify(level * " " * title)*"\"" - heading = jsonify([ - :t=>jsonify(:Header) - :c=>jsonify([level_num, jsonify([id_title, jsonify([]), jsonify([])]), jsonify([heading_title])]) - ]) - id_section = "\""*idify("section " * id_title)*"\"" - jsonify([ - :t=>jsonify(:Div) - :c=>jsonify([ - jsonify([id_section, jsonify([]), jsonify([])]), - jsonify([heading, heading_content...]) - ]) - ]) + title = "\"" * textify(ast, heading_title_node, escape_string) * "\"" + id_title = "\"" * idify(level * " " * title) * "\"" + heading = jsonify( + [ + :t => jsonify(:Header) + :c => jsonify([ + level_num, + jsonify([id_title, jsonify([]), jsonify([])]), + jsonify([heading_title]), + ]) + ], + ) + id_section = "\"" * idify("section " * id_title) * "\"" + return jsonify( + [ + :t => jsonify(:Div) + :c => jsonify([ + jsonify([id_section, jsonify([]), jsonify([])]), + jsonify([heading, heading_content...]), + ]) + ], + ) end codegen(::JSONTarget, ::StrongDelimiter, ast::NorgDocument, node::Node) = "" @@ -390,24 +460,32 @@ function codegen_nestable_children(t::JSONTarget, ast::NorgDocument, node::Node) push!(res, r) end end - res + return res end function codegen(t::JSONTarget, ::UnorderedList, ast::NorgDocument, node::Node) - jsonify([ - :t=>jsonify(:BulletList) - :c=>jsonify(codegen_nestable_children(t, ast, node)) - ]) + return jsonify( + [ + :t => jsonify(:BulletList) + :c => jsonify(codegen_nestable_children(t, ast, node)) + ] + ) end function codegen(t::JSONTarget, ::OrderedList, ast::NorgDocument, node::Node) - jsonify([ - :t=>jsonify(:OrderedList) - :c=>jsonify([ - jsonify(["1", jsonify([:t=>jsonify(:Decimal)]), jsonify([:t=>jsonify(:Period)])]), - jsonify(codegen_nestable_children(t, ast, node)) - ]) - ]) + return jsonify( + [ + :t => jsonify(:OrderedList) + :c => jsonify([ + jsonify([ + "1", + jsonify([:t => jsonify(:Decimal)]), + jsonify([:t => jsonify(:Period)]), + ]), + jsonify(codegen_nestable_children(t, ast, node)), + ]) + ], + ) end function codegen(t::JSONTarget, ::NestableItem, ast::NorgDocument, node::Node) @@ -415,14 +493,15 @@ function codegen(t::JSONTarget, ::NestableItem, ast::NorgDocument, node::Node) for c in children(node) if kind(c) == K"IndentSegment" append!(res, codegen(t, ast, c)) - elseif kind(c) ∉ KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" + elseif kind(c) ∉ + KSet"TimestampExtension PriorityExtension DueDateExtension StartDateExtension" r = codegen(t, ast, c) if !isempty(r) push!(res, r) end end end - jsonify(res) + return jsonify(res) end function codegen(t::JSONTarget, ::Quote, ast::NorgDocument, node::Node) @@ -430,11 +509,11 @@ function codegen(t::JSONTarget, ::Quote, ast::NorgDocument, node::Node) # that. 
res = String[] for c in children(node) - append!(res, filter(!isempty, codegen.(Ref(t), Ref(ast), children(c)))|>collect) + append!(res, collect(filter(!isempty, codegen.(Ref(t), Ref(ast), children(c))))) end - jsonify([ - :t=>jsonify(:BlockQuote) - :c=>jsonify(res) + return jsonify([ + :t => jsonify(:BlockQuote) + :c => jsonify(res) ]) end @@ -444,33 +523,39 @@ function codegen(t::JSONTarget, ::StandardRangedTag, ast::NorgDocument, node::No if tag_litteral == "comment" "" elseif tag_litteral == "example" - jsonify([ - :t=>jsonify(:CodeBlock) - :c=>jsonify([ - jsonify([Symbol(""), jsonify(["\"norg\""]), jsonify([])]), - "\""*textify(ast, last(others), escape_string)*"\"" - ]) - ]) + jsonify( + [ + :t => jsonify(:CodeBlock) + :c => jsonify([ + jsonify([Symbol(""), jsonify(["\"norg\""]), jsonify([])]), + "\"" * textify(ast, last(others), escape_string) * "\"", + ]) + ], + ) elseif tag_litteral == "details" # TODO "" elseif tag_litteral == "group" jsonify([ - :t=>jsonify(:Div), - :c=>jsonify([ - jsonify([Symbol(""), jsonify([]), jsonify([])]), - jsonify(codegen_children(t, ast, last(others))) - ]) + :t => jsonify(:Div), + :c => jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + jsonify(codegen_children(t, ast, last(others))), + ]), ]) else - @warn "Unknown standard ranged tag." tag_litteral ast.tokens[AST.start(node)] ast.tokens[AST.stop(node)] - jsonify([ - :t=>jsonify(:Div) - :c=>jsonify([ - jsonify([Symbol(""), jsonify([]), jsonify([])]), - jsonify(codegen_children(t, ast, last(others))) - ]) - ]) + @warn "Unknown standard ranged tag." tag_litteral ast.tokens[AST.start(node)] ast.tokens[AST.stop( + node + )] + jsonify( + [ + :t => jsonify(:Div) + :c => jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + jsonify(codegen_children(t, ast, last(others))), + ]) + ], + ) end end @@ -481,26 +566,30 @@ function codegen(::JSONTarget, ::Verbatim, ast::NorgDocument, node::Node) return "" end if length(others) == 1 - jsonify([ - :t=>jsonify(:CodeBlock) - :c=>jsonify([ - jsonify([Symbol(""), jsonify([]), jsonify([])]), - "\""*textify(ast, last(others), escape_string)*"\"" - ]) - ]) + jsonify( + [ + :t => jsonify(:CodeBlock) + :c => jsonify([ + jsonify([Symbol(""), jsonify([]), jsonify([])]), + "\"" * textify(ast, last(others), escape_string) * "\"", + ]) + ], + ) else language = if kind(first(others)) == K"TagParameter" litteral(ast, first(others)) else litteral(ast, others[2]) end - jsonify([ - :t=>jsonify(:CodeBlock) - :c=>jsonify([ - jsonify([Symbol(""), jsonify(["\""*language*"\""]), jsonify([])]), - "\""*textify(ast, last(others), escape_string)*"\"" + jsonify( + [ + :t => jsonify(:CodeBlock) + :c => jsonify([ + jsonify([Symbol(""), jsonify(["\"" * language * "\""]), jsonify([])]), + "\"" * textify(ast, last(others), escape_string) * "\"", ]) - ]) + ], + ) end end @@ -512,74 +601,90 @@ function codegen(::JSONTarget, ::TodoExtension, ast::NorgDocument, node::Node) else s = "☐" end - jsonify([ - :t=>jsonify(:Plain) - :c=>jsonify([jsonify([ - :t=>jsonify(:Str) - :c=>s - ])]) - ]) -end - -function codegen(t::JSONTarget, c::Union{WeakCarryoverTag, StrongCarryoverTag}, ast::NorgDocument, node::Node) + return jsonify( + [ + :t => jsonify(:Plain) + :c => jsonify([jsonify([ + :t => jsonify(:Str) + :c => s + ])]) + ] + ) +end + +function codegen( + t::JSONTarget, + c::Union{WeakCarryoverTag,StrongCarryoverTag}, + ast::NorgDocument, + node::Node, +) content = codegen(t, ast, last(children(node))) - label = "\""*textify(ast, first(children(node)), escape_string)*"\"" + label = "\"" * 
textify(ast, first(children(node)), escape_string) * "\"" # TODO: there's most likely some room for improvement here, as some contents # already have a mechanism for attributes, so the Div is not needed. attr = [Symbol(""), jsonify([]), jsonify([])] if length(children(node)) <= 2 attr[2] = jsonify([label]) elseif length(children(node)) == 3 - attr[3] = jsonify([jsonify([label, "\""*textify(ast, children(node)[2], escape_string)*"\""])]) + attr[3] = jsonify([ + jsonify([label, "\"" * textify(ast, children(node)[2], escape_string) * "\""]) + ]) else - attr[2] = jsonify(["\""*join(textify.(Ref(ast), children(node)[1:end-1], escape_string), "-")*"\""]) + attr[2] = jsonify([ + "\"" * + join(textify.(Ref(ast), children(node)[1:(end - 1)], escape_string), "-") * + "\"", + ]) end - t = if kind(node) == K"WeakCarryoverTag" && kind(last(children(node)))==K"ParagraphSegment" - :Span - else - :Div - end + t = + if kind(node) == K"WeakCarryoverTag" && + kind(last(children(node))) == K"ParagraphSegment" + :Span + else + :Div + end if !(first(content) == '[') content = jsonify([content]) end - jsonify([ - :t=>jsonify(t), - :c=>jsonify([jsonify(attr), content]) - ]) + return jsonify([:t => jsonify(t), :c => jsonify([jsonify(attr), content])]) end function codegen(t::JSONTarget, ::Definition, ast::NorgDocument, node::Node) items = children(node) - jsonify([ - :t=>jsonify(:DefinitionList) - :c=>map(items) do item - term, def... = children(item) - term_id = "def_" * idify(textify(ast, term, escape_string)) - term_node = jsonify([ - :t=>jsonify(:Span) - :c=>jsonify([ - jsonify(["\""*term_id*"\"", jsonify([]), jsonify([])]), - jsonify([codegen(t, ast, term)]) - ]) - ]) - def_node = jsonify(codegen.(Ref(t), Ref(ast), def)) - jsonify([jsonify([term_node]), jsonify([def_node])]) - end |> jsonify - ]) + return jsonify( + [ + :t => jsonify(:DefinitionList) + :c => jsonify(map(items) do item + term, def... = children(item) + term_id = "def_" * idify(textify(ast, term, escape_string)) + term_node = jsonify( + [ + :t => jsonify(:Span) + :c => jsonify([ + jsonify(["\"" * term_id * "\"", jsonify([]), jsonify([])]), + jsonify([codegen(t, ast, term)]), + ]) + ], + ) + def_node = jsonify(codegen.(Ref(t), Ref(ast), def)) + jsonify([jsonify([term_node]), jsonify([def_node])]) + end) + ], + ) end function codegen(::JSONTarget, ::Footnote, ast::NorgDocument, node::Node) # Return nothing, pandoc expects footnotes to be defined where they are called. - "" + return "" end function codegen(t::JSONTarget, ::Slide, ast::NorgDocument, node::Node) - codegen(t, ast, first(children(node))) + return codegen(t, ast, first(children(node))) end function codegen(t::JSONTarget, ::IndentSegment, ast::NorgDocument, node::Node) - codegen_children(t, ast, node) + return codegen_children(t, ast, node) end export JSONTarget diff --git a/src/kind.jl b/src/kind.jl index 4561e43..cac558d 100644 --- a/src/kind.jl +++ b/src/kind.jl @@ -12,14 +12,14 @@ module Kinds All the defined kind names. """ const _kind_names = [ - "None" - "StartOfFile" - "EndOfFile" - "BEGIN_WHITESPACE" + "None" + "StartOfFile" + "EndOfFile" + "BEGIN_WHITESPACE" "LineEnding" "Whitespace" - "END_WHITESPACE" - "BEGIN_PUNCTUATION" + "END_WHITESPACE" + "BEGIN_PUNCTUATION" "Punctuation" "\\" "*" @@ -50,37 +50,37 @@ const _kind_names = [ ")" "|" "+" - "END_PUNCTUATION" - "x" - "Word" + "END_PUNCTUATION" + "x" + "Word" - # AST stuff - "NorgDocument" - "BEGIN_AST_NODE" + # AST stuff + "NorgDocument" + "BEGIN_AST_NODE" # Leafs contain a set of tokens. 
"BEGIN_AST_LEAF" - "WordNode" - "Number" - "TagName" - "TagParameter" - "VerbatimBody" - "HeadingPreamble" - "NestablePreamble" - "LineNumberTarget" - "URLTarget" - "FileTarget" - "FileNorgRootTarget" - "Timestamp" - "BEGIN_TODO_STATUS" - "StatusUndone" - "StatusDone" - "StatusNeedFurtherInput" - "StatusUrgent" - "StatusRecurring" - "StatusInProgress" - "StatusOnHold" - "StatusCancelled" - "END_TODO_STATUS" + "WordNode" + "Number" + "TagName" + "TagParameter" + "VerbatimBody" + "HeadingPreamble" + "NestablePreamble" + "LineNumberTarget" + "URLTarget" + "FileTarget" + "FileNorgRootTarget" + "Timestamp" + "BEGIN_TODO_STATUS" + "StatusUndone" + "StatusDone" + "StatusNeedFurtherInput" + "StatusUrgent" + "StatusRecurring" + "StatusInProgress" + "StatusOnHold" + "StatusCancelled" + "END_TODO_STATUS" "END_AST_LEAF" "Paragraph" "ParagraphSegment" @@ -92,118 +92,117 @@ const _kind_names = [ "RangeableItem" "StandardRangedTagBody" "BEGIN_TAG" - "BEGIN_RANGED_TAG" - "Verbatim" - "END_RANGED_TAG" - "BEGIN_CARRYOVER_TAG" - "WeakCarryoverTag" - "StrongCarryoverTag" - "END_CARRYOVER_TAG" - "StandardRangedTag" + "BEGIN_RANGED_TAG" + "Verbatim" + "END_RANGED_TAG" + "BEGIN_CARRYOVER_TAG" + "WeakCarryoverTag" + "StrongCarryoverTag" + "END_CARRYOVER_TAG" + "StandardRangedTag" "END_TAG" "HeadingTitle" "BEGIN_MATCHED_INLINE" - "BEGIN_ATTACHED_MODIFIER" - "Bold" - "Italic" - "Underline" - "Strikethrough" - "Spoiler" - "Superscript" - "Subscript" - "InlineCode" - "NullModifier" - "InlineMath" - "Variable" - "BEGIN_FREE_FORM_ATTACHED_MODIFIER" - "FreeFormBold" - "FreeFormItalic" - "FreeFormUnderline" - "FreeFormStrikethrough" - "FreeFormSpoiler" - "FreeFormSuperscript" - "FreeFormSubscript" - "FreeFormInlineCode" - "FreeFormNullModifier" - "FreeFormInlineMath" - "FreeFormVariable" - "END_FREE_FORM_ATTACHED_MODIFIER" - "END_ATTACHED_MODIFIER" - "BEGIN_LINK_LOCATION" - "URLLocation" - "LineNumberLocation" - "DetachedModifierLocation" - "MagicLocation" - "FileLocation" - "NorgFileLocation" - "WikiLocation" - "TimestampLocation" - "END_LINK_LOCATION" - "LinkDescription" - "LinkLocation" - "InlineLinkTarget" + "BEGIN_ATTACHED_MODIFIER" + "Bold" + "Italic" + "Underline" + "Strikethrough" + "Spoiler" + "Superscript" + "Subscript" + "InlineCode" + "NullModifier" + "InlineMath" + "Variable" + "BEGIN_FREE_FORM_ATTACHED_MODIFIER" + "FreeFormBold" + "FreeFormItalic" + "FreeFormUnderline" + "FreeFormStrikethrough" + "FreeFormSpoiler" + "FreeFormSuperscript" + "FreeFormSubscript" + "FreeFormInlineCode" + "FreeFormNullModifier" + "FreeFormInlineMath" + "FreeFormVariable" + "END_FREE_FORM_ATTACHED_MODIFIER" + "END_ATTACHED_MODIFIER" + "BEGIN_LINK_LOCATION" + "URLLocation" + "LineNumberLocation" + "DetachedModifierLocation" + "MagicLocation" + "FileLocation" + "NorgFileLocation" + "WikiLocation" + "TimestampLocation" + "END_LINK_LOCATION" + "LinkDescription" + "LinkLocation" + "InlineLinkTarget" "END_MATCHED_INLINE" "BEGIN_DETACHED_MODIFIER" - "BEGIN_HEADING" - "Heading1" - "Heading2" - "Heading3" - "Heading4" - "Heading5" - "Heading6" - "END_HEADING" - "BEGIN_DELIMITING_MODIFIER" - "WeakDelimitingModifier" - "StrongDelimitingModifier" - "HorizontalRule" - "END_DELIMITING_MODIFIER" - "BEGIN_NESTABLE" - "BEGIN_UNORDERED_LIST" - "UnorderedList1" - "UnorderedList2" - "UnorderedList3" - "UnorderedList4" - "UnorderedList5" - "UnorderedList6" - "END_UNORDERED_LIST" - "BEGIN_ORDERED_LIST" - "OrderedList1" - "OrderedList2" - "OrderedList3" - "OrderedList4" - "OrderedList5" - "OrderedList6" - "END_ORDERED_LIST" - "BEGIN_QUOTE" - "Quote1" - 
"Quote2" - "Quote3" - "Quote4" - "Quote5" - "Quote6" - "END_QUOTE" - "END_NESTABLE" - "BEGIN_DETACHED_MODIFIER_EXTENSIONS" - "TodoExtension" - "TimestampExtension" - "PriorityExtension" - "DueDateExtension" - "StartDateExtension" - "END_DETACHED_MODIFIER_EXTENSIONS" - "BEGIN_RANGEABLE_DETACHED_MODIFIERS" - "Definition" - "TableCell" - "Footnote" - "END_RANGEABLE_DETACHED_MODIFIERS" - "BEGIN_DETACHED_MODIFIER_SUFFIX" - "Slide" - "IndentSegment" - "END_DETACHED_MODIFIER_SUFFIX" + "BEGIN_HEADING" + "Heading1" + "Heading2" + "Heading3" + "Heading4" + "Heading5" + "Heading6" + "END_HEADING" + "BEGIN_DELIMITING_MODIFIER" + "WeakDelimitingModifier" + "StrongDelimitingModifier" + "HorizontalRule" + "END_DELIMITING_MODIFIER" + "BEGIN_NESTABLE" + "BEGIN_UNORDERED_LIST" + "UnorderedList1" + "UnorderedList2" + "UnorderedList3" + "UnorderedList4" + "UnorderedList5" + "UnorderedList6" + "END_UNORDERED_LIST" + "BEGIN_ORDERED_LIST" + "OrderedList1" + "OrderedList2" + "OrderedList3" + "OrderedList4" + "OrderedList5" + "OrderedList6" + "END_ORDERED_LIST" + "BEGIN_QUOTE" + "Quote1" + "Quote2" + "Quote3" + "Quote4" + "Quote5" + "Quote6" + "END_QUOTE" + "END_NESTABLE" + "BEGIN_DETACHED_MODIFIER_EXTENSIONS" + "TodoExtension" + "TimestampExtension" + "PriorityExtension" + "DueDateExtension" + "StartDateExtension" + "END_DETACHED_MODIFIER_EXTENSIONS" + "BEGIN_RANGEABLE_DETACHED_MODIFIERS" + "Definition" + "TableCell" + "Footnote" + "END_RANGEABLE_DETACHED_MODIFIERS" + "BEGIN_DETACHED_MODIFIER_SUFFIX" + "Slide" + "IndentSegment" + "END_DETACHED_MODIFIER_SUFFIX" "END_DETACHED_MODIFIER" - "END_AST_NODE" + "END_AST_NODE" ] - """ Kind(name) K"name" @@ -212,9 +211,7 @@ This is type tag, used to specify the type of tokens and AST nodes. """ primitive type Kind 8 end -let kind_int_type = :UInt8, - max_kind_int = length(_kind_names)-1 - +let kind_int_type = :UInt8, max_kind_int = length(_kind_names) - 1 @eval begin function Kind(x::Integer) if x < 0 || x > $max_kind_int @@ -223,12 +220,12 @@ let kind_int_type = :UInt8, return Base.bitcast(Kind, convert($kind_int_type, x)) end - let kindstr_to_int = Dict(s=>i-1 for (i,s) in enumerate(_kind_names)) + let kindstr_to_int = Dict(s => i - 1 for (i, s) in enumerate(_kind_names)) function Base.convert(::Type{Kind}, s::AbstractString) i = get(kindstr_to_int, s) do error("unknown Kind name $(repr(s))") end - Kind(i) + return Kind(i) end end @@ -238,15 +235,29 @@ let kind_int_type = :UInt8, Base.typemin(::Type{Kind}) = Kind(0) Base.typemax(::Type{Kind}) = Kind($max_kind_int) - Base.instances(::Type{Kind}) = (Kind(i) for i in reinterpret($kind_int_type, typemin(Kind)):reinterpret($kind_int_type, typemax(Kind))) - Base.:<(x::Kind, y::Kind) = reinterpret($kind_int_type, x) < reinterpret($kind_int_type, y) + function Base.instances(::Type{Kind}) + return ( + Kind(i) for i in + reinterpret($kind_int_type, typemin(Kind)):reinterpret( + $kind_int_type, typemax(Kind) + ) + ) + end + function Base.:<(x::Kind, y::Kind) + return reinterpret($kind_int_type, x) < reinterpret($kind_int_type, y) + end - all_single_punctuation_tokens() = (Kind(i) for i in (reinterpret($kind_int_type, convert(Kind, "Punctuation"))+1):(reinterpret($kind_int_type, convert(Kind, "END_PUNCTUATION"))-1)) + function all_single_punctuation_tokens() + return ( + Kind(i) for i in + (reinterpret($kind_int_type, convert(Kind, "Punctuation")) + 1):(reinterpret($kind_int_type, convert(Kind, "END_PUNCTUATION")) - 1) + ) + end end end function Base.show(io::IO, k::Kind) - print(io, "K\"$(string(k))\"") + return print(io, 
"K\"$(string(k))\"") end """ @@ -256,7 +267,7 @@ For example * K">" is the kind of the greater than sign token """ macro K_str(s) - convert(Kind, s) + return convert(Kind, s) end """ @@ -286,11 +297,17 @@ kind(::Nothing) = K"None" is_leaf(k::Kind) = K"BEGIN_AST_LEAF" < k < K"END_AST_LEAF" is_matched_inline(k::Kind) = K"BEGIN_MATCHED_INLINE" < k < K"END_MATCHED_INLINE" is_attached_modifier(k::Kind) = K"BEGIN_ATTACHED_MODIFIER" < k < K"END_ATTACHED_MODIFIER" -is_free_form_attached_modifier(k::Kind) = K"BEGIN_FREE_FORM_ATTACHED_MODIFIER" < k < K"END_FREE_FORM_ATTACHED_MODIFIER" +function is_free_form_attached_modifier(k::Kind) + return K"BEGIN_FREE_FORM_ATTACHED_MODIFIER" < k < K"END_FREE_FORM_ATTACHED_MODIFIER" +end is_link_location(k::Kind) = K"BEGIN_LINK_LOCATION" < k < K"END_LINK_LOCATION" is_detached_modifier(k::Kind) = K"BEGIN_DETACHED_MODIFIER" < k < K"END_DETACHED_MODIFIER" -is_detached_modifier_extension(k::Kind) = K"BEGIN_DETACHED_MODIFIER_EXTENSIONS" < k < K"END_DETACHED_MODIFIER_EXTENSIONS" -is_delimiting_modifier(k::Kind) = K"BEGIN_DELIMITING_MODIFIER" < k < K"END_DELIMITING_MODIFIER" +function is_detached_modifier_extension(k::Kind) + return K"BEGIN_DETACHED_MODIFIER_EXTENSIONS" < k < K"END_DETACHED_MODIFIER_EXTENSIONS" +end +function is_delimiting_modifier(k::Kind) + return K"BEGIN_DELIMITING_MODIFIER" < k < K"END_DELIMITING_MODIFIER" +end is_nestable(k::Kind) = K"BEGIN_NESTABLE" < k < K"END_NESTABLE" is_heading(k::Kind) = K"BEGIN_HEADING" < k < K"END_HEADING" is_unordered_list(k::Kind) = K"BEGIN_UNORDERED_LIST" < k < K"END_UNORDERED_LIST" diff --git a/src/match/attached_modifiers.jl b/src/match/attached_modifiers.jl index cf96ddb..2a1a367 100644 --- a/src/match/attached_modifiers.jl +++ b/src/match/attached_modifiers.jl @@ -32,7 +32,7 @@ freeformattachedmodifier(::InlineCode) = K"FreeFormInlineCode" freeformattachedmodifier(::NullModifier) = K"FreeFormNullModifier" freeformattachedmodifier(::InlineMath) = K"FreeFormInlineMath" freeformattachedmodifier(::Variable) = K"FreeFormVariable" -freeformattachedmodifier(t::T) where {T <: FreeFormAttachedModifier} = attachedmodifier(t) +freeformattachedmodifier(t::T) where {T<:FreeFormAttachedModifier} = attachedmodifier(t) function match_norg(t::T, parents, tokens, i) where {T<:AttachedModifierStrategy} if K"LinkLocation" ∈ parents @@ -43,29 +43,38 @@ function match_norg(t::T, parents, tokens, i) where {T<:AttachedModifierStrategy prev_i = prevind(tokens, i) last_token = tokens[prev_i] # if opening modifier is found - if (is_sof(last_token) || is_punctuation(last_token) || is_whitespace(last_token)) && (!is_eof(next_token) && !is_whitespace(next_token)) + if (is_sof(last_token) || is_punctuation(last_token) || is_whitespace(last_token)) && + (!is_eof(next_token) && !is_whitespace(next_token)) if kind(next_token) == K"|" MatchFound(freeformattachedmodifier(t)) else MatchFound(attachedmodifier(t)) end - # Link modifier + # Link modifier elseif kind(last_token) == K":" && (!is_eof(next_token) && !is_whitespace(next_token)) prev_prev_i = prevind(tokens, prev_i) - if prev_prev_i >= firstindex(tokens) && (is_sof(tokens[prev_prev_i]) || is_punctuation(tokens[prev_prev_i]) || is_whitespace(tokens[prev_prev_i])) + if prev_prev_i >= firstindex(tokens) && ( + is_sof(tokens[prev_prev_i]) || + is_punctuation(tokens[prev_prev_i]) || + is_whitespace(tokens[prev_prev_i]) + ) MatchFound(attachedmodifier(t)) else MatchNotFound() end - # Closing modifier - elseif attachedmodifier(t) ∈ parents && !is_whitespace(last_token) && (is_eof(next_token) || 
is_whitespace(next_token) || is_punctuation(next_token)) - MatchClosing(attachedmodifier(t), first(parents)==attachedmodifier(t)) + # Closing modifier + elseif attachedmodifier(t) ∈ parents && + !is_whitespace(last_token) && + (is_eof(next_token) || is_whitespace(next_token) || is_punctuation(next_token)) + MatchClosing(attachedmodifier(t), first(parents) == attachedmodifier(t)) else MatchNotFound() end end -function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttachedModifierStrategy, FreeFormAttachedModifier}} +function match_norg( + t::T, parents, tokens, i +) where {T<:Union{VerbatimAttachedModifierStrategy,FreeFormAttachedModifier}} if K"LinkLocation" ∈ parents return MatchNotFound() end @@ -75,7 +84,9 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached last_token = tokens[prev_i] token = tokens[i] # Opening modifier - if attachedmodifier(t) ∉ parents && (is_sof(last_token) || is_punctuation(last_token) || is_whitespace(last_token)) && (!is_eof(next_token) && !is_whitespace(next_token)) + if attachedmodifier(t) ∉ parents && + (is_sof(last_token) || is_punctuation(last_token) || is_whitespace(last_token)) && + (!is_eof(next_token) && !is_whitespace(next_token)) if kind(next_token) == K"|" # Edge case: we want to be able to write `|` (verbatim attached # modifiers have higher precedence than free-form attached modifiers) @@ -83,7 +94,11 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached token = tokens[i] next_i = nextind(tokens, i) next_token = tokens[next_i] - if kind(token) == K"`" && (is_punctuation(next_token) || is_whitespace(next_token) || is_eof(next_token)) + if kind(token) == K"`" && ( + is_punctuation(next_token) || + is_whitespace(next_token) || + is_eof(next_token) + ) MatchFound(attachedmodifier(t)) else MatchFound(freeformattachedmodifier(t)) @@ -93,15 +108,23 @@ function match_norg(t::T, parents, tokens, i) where {T <: Union{VerbatimAttached else MatchFound(attachedmodifier(t)) end - # Closing modifier + # Closing modifier elseif attachedmodifier(t) ∈ parents && t isa FreeFormAttachedModifier - MatchClosing(attachedmodifier(t), first(parents)==attachedmodifier(t)) - elseif attachedmodifier(t) ∈ parents && !is_whitespace(last_token) && (is_eof(next_token) || is_whitespace(next_token) || is_punctuation(next_token)) MatchClosing(attachedmodifier(t), first(parents) == attachedmodifier(t)) - # Link modifier - elseif !(t isa FreeFormAttachedModifier) && kind(last_token) == K":" && (!is_eof(next_token) && !is_whitespace(next_token)) + elseif attachedmodifier(t) ∈ parents && + !is_whitespace(last_token) && + (is_eof(next_token) || is_whitespace(next_token) || is_punctuation(next_token)) + MatchClosing(attachedmodifier(t), first(parents) == attachedmodifier(t)) + # Link modifier + elseif !(t isa FreeFormAttachedModifier) && + kind(last_token) == K":" && + (!is_eof(next_token) && !is_whitespace(next_token)) prev_prev_i = prevind(tokens, prev_i) - if prev_prev_i >= firstindex(tokens) && (is_sof(tokens[prev_prev_i]) || is_punctuation(tokens[prev_prev_i]) || is_whitespace(tokens[prev_prev_i])) + if prev_prev_i >= firstindex(tokens) && ( + is_sof(tokens[prev_prev_i]) || + is_punctuation(tokens[prev_prev_i]) || + is_whitespace(tokens[prev_prev_i]) + ) MatchFound(attachedmodifier(t)) else MatchNotFound() diff --git a/src/match/detached_modifiers.jl b/src/match/detached_modifiers.jl index afa40c8..cbd7638 100644 --- a/src/match/detached_modifiers.jl +++ b/src/match/detached_modifiers.jl @@ -24,7 +24,9 @@ 
function match_norg(::Heading, parents, tokens, i) # If we are in a standard ranged tag, the relevant parents are those # within the tag. ancestor_headings = filter(is_heading, relevant_parents) - higher_level_ancestor_heading = findfirst(≥(current_level)∘heading_level, ancestor_headings) + higher_level_ancestor_heading = findfirst( + ≥(current_level) ∘ heading_level, ancestor_headings + ) if !isnothing(higher_level_ancestor_heading) MatchClosing(ancestor_headings[higher_level_ancestor_heading], false) elseif first(relevant_parents) ∈ [K"ParagraphSegment", K"Paragraph"] @@ -49,7 +51,7 @@ function match_norg(t::T, parents, tokens, i) where {T<:DelimitingModifier} return MatchNotFound() end token = tokens[i] - if kind(next_token) == kind(token) + if kind(next_token) == kind(token) new_i = nextind(tokens, next_i) new_token = tokens[new_i] is_delimiting = true @@ -62,7 +64,8 @@ function match_norg(t::T, parents, tokens, i) where {T<:DelimitingModifier} new_token = tokens[new_i] end if is_delimiting - if first(parents) ∈ KSet"NorgDocument IndentSegment StandardRangedTagBody" || is_heading(first(parents)) + if first(parents) ∈ KSet"NorgDocument IndentSegment StandardRangedTagBody" || + is_heading(first(parents)) MatchFound(delimitingmodifier(t)) else MatchClosing(first(parents), false) @@ -76,7 +79,7 @@ function match_norg(t::T, parents, tokens, i) where {T<:DelimitingModifier} end function nestable(::Quote, l) - if l<=1 + if l <= 1 K"Quote1" elseif l == 2 K"Quote2" @@ -91,7 +94,7 @@ function nestable(::Quote, l) end end function nestable(::UnorderedList, l) - if l<=1 + if l <= 1 K"UnorderedList1" elseif l == 2 K"UnorderedList2" @@ -106,7 +109,7 @@ function nestable(::UnorderedList, l) end end function nestable(::OrderedList, l) - if l<=1 + if l <= 1 K"OrderedList1" elseif l == 2 K"OrderedList2" @@ -131,7 +134,9 @@ function match_norg(t::T, parents, tokens, i) where {T<:Nestable} next_token = tokens[new_i] if kind(next_token) == K"Whitespace" ancestor_nestable = filter(is_nestable, parents) - higher_level_ancestor_id = findfirst(>(current_level)∘nestable_level, ancestor_nestable) + higher_level_ancestor_id = findfirst( + >(current_level) ∘ nestable_level, ancestor_nestable + ) if !isnothing(higher_level_ancestor_id) MatchClosing(ancestor_nestable[higher_level_ancestor_id], false) elseif first(parents) == nestable(t, current_level) diff --git a/src/match/links.jl b/src/match/links.jl index 301c6c0..61f4969 100644 --- a/src/match/links.jl +++ b/src/match/links.jl @@ -31,7 +31,7 @@ function match_norg(::LinkDescription, parents, tokens, i) end function match_norg(::LinkSubTarget, parents, tokens, i) - if kind(first(parents)) == K"FileLocation" + if kind(first(parents)) == K"FileLocation" if isnumeric(first(value(tokens[i]))) MatchFound(K"LineNumberLocation") else @@ -61,7 +61,9 @@ function match_norg(::Anchor, parents, tokens, i) end function match_norg(::InlineLinkTarget, parents, tokens, i) - if kind(tokens[i]) == K"<" && kind(tokens[nextind(tokens, i)]) != K"LineEnding" && K"InlineLinkTarget" ∉ parents + if kind(tokens[i]) == K"<" && + kind(tokens[nextind(tokens, i)]) != K"LineEnding" && + K"InlineLinkTarget" ∉ parents MatchFound(K"InlineLinkTarget") else MatchNotFound() diff --git a/src/match/match.jl b/src/match/match.jl index 078599c..0932aef 100644 --- a/src/match/match.jl +++ b/src/match/match.jl @@ -14,7 +14,7 @@ be `found`, can be `closing` (*i.e.* closing an attached modifier), `continued` (as in "ignore this token and continue parsing"). 
Whether the parser should `consume` or not the current token is given by the `consume` field. """ -struct MatchResult +struct MatchResult kind::Kind found::Bool closing::Bool @@ -31,7 +31,7 @@ isclosing(m::MatchResult) = m.closing iscontinue(m::MatchResult) = m.continued isnotfound(m::MatchResult) = !m.found consume(m::MatchResult) = m.consume -matched(m::MatchResult)= m.kind +matched(m::MatchResult) = m.kind function Base.show(io::IO, m::MatchResult) if isclosing(m) @@ -43,7 +43,7 @@ function Base.show(io::IO, m::MatchResult) else print(io, "MatchFound(") end - print(io, "kind=$(matched(m)), consume=$(consume(m)))") + return print(io, "kind=$(matched(m)), consume=$(consume(m)))") end """ @@ -99,7 +99,7 @@ function match_norg(parents, tokens, i) match_norg(LineEnding(), parents, tokens, i) elseif kind(token) == K"*" match_norg(Star(), parents, tokens, i) - elseif kind(token) == K"/" + elseif kind(token) == K"/" match_norg(Slash(), parents, tokens, i) elseif kind(token) == K"_" match_norg(Underscore(), parents, tokens, i) @@ -118,25 +118,25 @@ function match_norg(parents, tokens, i) elseif kind(token) == K"&" match_norg(Ampersand(), parents, tokens, i) elseif kind(token) == K"\\" - match_norg(BackSlash(), parents, tokens, i) + match_norg(BackSlash(), parents, tokens, i) elseif kind(token) == K"=" - match_norg(EqualSign(), parents, tokens, i) + match_norg(EqualSign(), parents, tokens, i) elseif kind(token) == K"{" - match_norg(LeftBrace(), parents, tokens, i) + match_norg(LeftBrace(), parents, tokens, i) elseif kind(token) == K"}" - match_norg(RightBrace(), parents, tokens, i) + match_norg(RightBrace(), parents, tokens, i) elseif kind(token) == K"]" - match_norg(RightSquareBracket(), parents, tokens, i) + match_norg(RightSquareBracket(), parents, tokens, i) elseif kind(token) == K"[" - match_norg(LeftSquareBracket(), parents, tokens, i) + match_norg(LeftSquareBracket(), parents, tokens, i) elseif kind(token) == K"~" - match_norg(Tilde(), parents, tokens, i) + match_norg(Tilde(), parents, tokens, i) elseif kind(token) == K">" - match_norg(GreaterThanSign(), parents, tokens, i) + match_norg(GreaterThanSign(), parents, tokens, i) elseif kind(token) == K"<" - match_norg(LesserThanSign(), parents, tokens, i) + match_norg(LesserThanSign(), parents, tokens, i) elseif kind(token) == K"@" - match_norg(CommercialAtSign(), parents, tokens, i) + match_norg(CommercialAtSign(), parents, tokens, i) elseif kind(token) == K"(" match_norg(LeftParenthesis(), parents, tokens, i) elseif kind(token) == K")" @@ -159,10 +159,10 @@ function match_norg(parents, tokens, i) if isnotfound(m) m = match_norg(Word(), parents, tokens, i) end - m + return m end -function match_norg(::Word, parents, tokens, i) +function match_norg(::Word, parents, tokens, i) if is_nestable(first(parents)) MatchClosing(first(parents), false) else @@ -211,7 +211,7 @@ end function match_norg(::LineEnding, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] - if first(parents) == K"NorgDocument" + if first(parents) == K"NorgDocument" MatchContinue() elseif is_line_ending(prev_token) nestable_parents = filter(is_nestable, parents[2:end]) @@ -244,7 +244,7 @@ function match_norg(::LineEnding, parents, tokens, i) MatchContinue() end elseif K"ParagraphSegment" ∈ parents - MatchClosing(K"ParagraphSegment", first(parents)==K"ParagraphSegment") + MatchClosing(K"ParagraphSegment", first(parents) == K"ParagraphSegment") elseif K"StandardRangedTagBody" ∈ parents i = nextind(tokens, i) m = match_norg(StandardRangedTag(), parents, tokens, 
nextind(tokens, i)) @@ -269,7 +269,7 @@ function match_norg(::Star, parents, tokens, i) if isnotfound(m) m = match_norg(Bold(), parents, tokens, i) end - m + return m end match_norg(::Slash, parents, tokens, i) = match_norg(Italic(), parents, tokens, i) @@ -305,12 +305,14 @@ function match_norg(::Minus, parents, tokens, i) end end -match_norg(::ExclamationMark, parents, tokens, i) = match_norg(Spoiler(), parents, tokens, i) +function match_norg(::ExclamationMark, parents, tokens, i) + return match_norg(Spoiler(), parents, tokens, i) +end function match_norg(::Circumflex, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] m = if is_line_ending(prev_token) || is_sof(prev_token) - match_norg(Footnote(), parents, tokens, i) + match_norg(Footnote(), parents, tokens, i) else MatchNotFound() end @@ -323,9 +325,13 @@ end match_norg(::Comma, parents, tokens, i) = match_norg(Subscript(), parents, tokens, i) -match_norg(::BackApostrophe, parents, tokens, i) = match_norg(InlineCode(), parents, tokens, i) +function match_norg(::BackApostrophe, parents, tokens, i) + return match_norg(InlineCode(), parents, tokens, i) +end -match_norg(::PercentSign, parents, tokens, i) = match_norg(NullModifier(), parents, tokens, i) +function match_norg(::PercentSign, parents, tokens, i) + return match_norg(NullModifier(), parents, tokens, i) +end match_norg(::Ampersand, parents, tokens, i) = match_norg(Variable(), parents, tokens, i) @@ -338,13 +344,13 @@ function match_norg(::Colon, parents, tokens, i) next_token = tokens[next_i] prev_i = prevind(tokens, i) prev_token = tokens[prev_i] - if kind(next_token) ∈ ATTACHED_DELIMITERS + if kind(next_token) ∈ ATTACHED_DELIMITERS m = match_norg(parents, tokens, next_i) if isfound(m) && AST.is_attached_modifier(kind(matched(m))) return MatchContinue() end end - MatchNotFound() + MatchNotFound() else m end @@ -395,7 +401,7 @@ function match_norg(::LeftSquareBracket, parents, tokens, i) end prev_i = prevind(tokens, i) last_token = tokens[prev_i] - next_i = nextind(tokens,i) + next_i = nextind(tokens, i) next_token = tokens[next_i] if kind(last_token) == K"}" && kind(next_token) != K"LineEnding" MatchFound(K"LinkDescription") @@ -424,13 +430,15 @@ function match_norg(::GreaterThanSign, parents, tokens, i) end end -match_norg(::LesserThanSign, parents, tokens, i) = match_norg(InlineLinkTarget(), parents, tokens, i) +function match_norg(::LesserThanSign, parents, tokens, i) + return match_norg(InlineLinkTarget(), parents, tokens, i) +end tag_to_strategy(::CommercialAtSign) = Verbatim() tag_to_strategy(::Plus) = WeakCarryoverTag() tag_to_strategy(::NumberSign) = StrongCarryoverTag() -function match_norg(t::Union{CommercialAtSign, Plus, NumberSign}, parents, tokens, i) +function match_norg(t::Union{CommercialAtSign,Plus,NumberSign}, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] if is_sof(prev_token) || is_line_ending(prev_token) match_norg(tag_to_strategy(t), parents, tokens, i) @@ -450,7 +458,7 @@ end function match_norg(::DollarSign, parents, tokens, i) prev_token = tokens[prevind(tokens, i)] m = if is_line_ending(prev_token) || is_sof(prev_token) - match_norg(Definition(), parents, tokens, i) + match_norg(Definition(), parents, tokens, i) else MatchNotFound() end @@ -468,7 +476,7 @@ function match_norg(::VerticalBar, parents, tokens, i) match_norg(StandardRangedTag(), parents, tokens, i) elseif kind(next_token) == K"*" match_norg(FreeFormBold(), parents, tokens, i) - elseif kind(next_token) == K"/" + elseif kind(next_token) == K"/" 
match_norg(FreeFormItalic(), parents, tokens, i) elseif kind(next_token) == K"_" match_norg(FreeFormUnderline(), parents, tokens, i) @@ -494,7 +502,8 @@ function match_norg(::VerticalBar, parents, tokens, i) end function match_norg(::LeftParenthesis, parents, tokens, i) - if is_detached_modifier(first(parents)) || (length(parents) > 1 && is_detached_modifier(parents[2])) + if is_detached_modifier(first(parents)) || + (length(parents) > 1 && is_detached_modifier(parents[2])) match_norg(DetachedModifierExtension(), parents, tokens, i) else MatchNotFound() diff --git a/src/match/rangeable_detached_modifier.jl b/src/match/rangeable_detached_modifier.jl index ff9f938..20f6c59 100644 --- a/src/match/rangeable_detached_modifier.jl +++ b/src/match/rangeable_detached_modifier.jl @@ -15,7 +15,8 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie if kind(token) == K"Whitespace" if first(parents) == K"Slide" MatchFound(rangeable_from_strategy(t)) - elseif (K"NestableItem" ∈ parents || AST.is_nestable(first(parents))) && K"Slide" ∉ parents + elseif (K"NestableItem" ∈ parents || AST.is_nestable(first(parents))) && + K"Slide" ∉ parents MatchClosing(first(parents), false) elseif !isdisjoint(parents, KSet"Paragraph ParagraphSegment") MatchClosing(first(parents), false) @@ -30,7 +31,8 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie else if first(parents) == K"Slide" MatchFound(rangeable_from_strategy(t)) - elseif (K"NestableItem" ∈ parents || AST.is_nestable(first(parents))) && K"Slide" ∉ parents + elseif (K"NestableItem" ∈ parents || AST.is_nestable(first(parents))) && + K"Slide" ∉ parents MatchClosing(first(parents), false) elseif !isdisjoint(parents, KSet"Paragraph ParagraphSegment") MatchClosing(first(parents), false) @@ -38,7 +40,9 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie MatchFound(rangeable_from_strategy(t)) end end - elseif kind(token) == rangeable_from_token(t) && kind(next_token) == K"LineEnding" && rangeable_from_strategy(t) ∈ parents + elseif kind(token) == rangeable_from_token(t) && + kind(next_token) == K"LineEnding" && + rangeable_from_strategy(t) ∈ parents nextline_i = consume_until(K"LineEnding", tokens, i) token = tokens[nextline_i] nextline_start_i = if kind(token) == K"Whitespace" @@ -49,13 +53,13 @@ function match_norg(t::T, parents, tokens, i) where {T<:RangeableDetachedModifie token = tokens[nextline_start_i] if kind(token) == rangeable_from_token(t) m = match_norg(t, parents, tokens, nextline_start_i) - if isfound(m) && matched(m)==rangeable_from_strategy(t) + if isfound(m) && matched(m) == rangeable_from_strategy(t) MatchClosing(first(parents), true) else - MatchClosing(first(parents), rangeable_from_strategy(t)==first(parents)) + MatchClosing(first(parents), rangeable_from_strategy(t) == first(parents)) end else - MatchClosing(first(parents), rangeable_from_strategy(t)==first(parents)) + MatchClosing(first(parents), rangeable_from_strategy(t) == first(parents)) end else MatchNotFound() diff --git a/src/match/tags.jl b/src/match/tags.jl index ce90162..b8a36fd 100644 --- a/src/match/tags.jl +++ b/src/match/tags.jl @@ -4,7 +4,7 @@ token_tag(::Verbatim) = K"@" token_tag(::StandardRangedTag) = K"|" body(::Verbatim) = K"VerbatimBody" body(::StandardRangedTag) = K"StandardRangedTagBody" -function match_norg(t::T, parents, tokens, i) where {T <: Tag} +function match_norg(t::T, parents, tokens, i) where {T<:Tag} i = nextind(tokens, i) token = tokens[i] if kind(token) == K"Word" @@ 
-20,7 +20,11 @@ function match_norg(t::T, parents, tokens, i) where {T <: Tag} MatchNotFound() elseif kind(first(parents)) ∈ KSet"Slide IndentSegment" MatchFound(tag(t)) - elseif !(is_nestable(first(parents)) || is_heading(first(parents)) || kind(first(parents)) ∈ KSet"NorgDocument StandardRangedTagBody") + elseif !( + is_nestable(first(parents)) || + is_heading(first(parents)) || + kind(first(parents)) ∈ KSet"NorgDocument StandardRangedTagBody" + ) MatchClosing(first(parents), false) else MatchFound(tag(t)) @@ -54,7 +58,9 @@ function match_norg(::StrongCarryoverTag, parents, tokens, i) parents end if kind(token) == K"Word" - if is_nestable(first(relevant_parents)) || K"Paragraph" ∈ relevant_parents || K"NestableItem" ∈ relevant_parents + if is_nestable(first(relevant_parents)) || + K"Paragraph" ∈ relevant_parents || + K"NestableItem" ∈ relevant_parents MatchClosing(first(relevant_parents), false) else MatchFound(K"StrongCarryoverTag") diff --git a/src/parser/attachedmodifier.jl b/src/parser/attachedmodifier.jl index 90b42fa..e93af6f 100644 --- a/src/parser/attachedmodifier.jl +++ b/src/parser/attachedmodifier.jl @@ -23,7 +23,9 @@ consumepost(::FreeFormInlineCode) = 2 consumepost(::FreeFormInlineMath) = 2 consumepost(::FreeFormVariable) = 2 -function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where {T<:AttachedModifierStrategy} +function parse_norg( + t::T, parents::Vector{Kind}, tokens::Vector{Token}, i +) where {T<:AttachedModifierStrategy} start = i children = AST.Node[] for _ in 1:consumepre(t) @@ -35,7 +37,7 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where m = match_norg([node_kind, parents...], tokens, i) if isclosing(m) if consume(m) && consumepost(t) >= 2 - for _ in 1:(consumepost(t)-1) + for _ in 1:(consumepost(t) - 1) i = nextind(tokens, i) end end @@ -52,17 +54,24 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where if is_eof(tokens[i]) || (isclosing(m) && matched(m) == K"None") || # Special case for inline code precedence. (isclosing(m) && matched(m) != node_kind && matched(m) ∈ parents) # we've been tricked in thincking we were in a modifier. - new_children = [parse_norg(Word(), parents, tokens, start), first(children).children...] - children[1] = AST.Node(K"ParagraphSegment", new_children, start, AST.stop(first(children))) + new_children = [ + parse_norg(Word(), parents, tokens, start), first(children).children... 
+ ] + children[1] = AST.Node( + K"ParagraphSegment", new_children, start, AST.stop(first(children)) + ) i = prevind(tokens, i) node_kind = K"None" elseif isempty(children) # Empty attached modifiers are forbiddens - children = [parse_norg(Word(), parents, tokens, start), parse_norg(Word(), parents, tokens, i)] + children = [ + parse_norg(Word(), parents, tokens, start), + parse_norg(Word(), parents, tokens, i), + ] node_kind = K"None" elseif isclosing(m) && !consume(m) i = prevind(tokens, i) elseif isclosing(m) && kind(tokens[nextind(tokens, i)]) == K":" i = nextind(tokens, i) end - AST.Node(node_kind, children, start, i) + return AST.Node(node_kind, children, start, i) end diff --git a/src/parser/detachedmodifierextensions.jl b/src/parser/detachedmodifierextensions.jl index 42e6596..b8e8cd5 100644 --- a/src/parser/detachedmodifierextensions.jl +++ b/src/parser/detachedmodifierextensions.jl @@ -1,7 +1,9 @@ -function parse_norg(::DetachedModifierExtension, parents::Vector{Kind}, tokens::Vector{Token}, i) +function parse_norg( + ::DetachedModifierExtension, parents::Vector{Kind}, tokens::Vector{Token}, i +) m = match_norg(DetachedModifierExtension(), parents, tokens, i) if !Match.isfound(m) - return AST.Node(K"None") + return AST.Node(K"None") end extension = matched(m) if extension == K"TodoExtension" @@ -22,7 +24,7 @@ function parse_norg(::TodoExtension, parents::Vector{Kind}, tokens::Vector{Token start = i i = nextind(tokens, i) token = tokens[i] - statusstart=i + statusstart = i if kind(token) == K"Whitespace" status = K"StatusUndone" elseif kind(token) == K"x" @@ -50,9 +52,16 @@ function parse_norg(::TodoExtension, parents::Vector{Kind}, tokens::Vector{Token i = piped.stop end if kind(piped) == K"None" - AST.Node(K"TodoExtension", [AST.Node(status, [], statusstart, statusstart)], start, i) + AST.Node( + K"TodoExtension", [AST.Node(status, [], statusstart, statusstart)], start, i + ) else - AST.Node(K"TodoExtension", [AST.Node(status, [], statusstart, statusstart), piped], start, i) + AST.Node( + K"TodoExtension", + [AST.Node(status, [], statusstart, statusstart), piped], + start, + i, + ) end end diff --git a/src/parser/detachedmodifiersuffix.jl b/src/parser/detachedmodifiersuffix.jl index 4e91b58..bfc4c81 100644 --- a/src/parser/detachedmodifiersuffix.jl +++ b/src/parser/detachedmodifiersuffix.jl @@ -1,6 +1,6 @@ function parse_norg(::Slide, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i - i = consume_until(K"LineEnding", tokens, i) + i = consume_until(K"LineEnding", tokens, i) p = [K"Slide", parents...] m = match_norg(p, tokens, i) children = if isfound(m) @@ -18,12 +18,12 @@ function parse_norg(::Slide, parents::Vector{Kind}, tokens::Vector{Token}, i) else AST.Node[] end - AST.Node(K"Slide", children, start, AST.stop(last(children))) + return AST.Node(K"Slide", children, start, AST.stop(last(children))) end function parse_norg(::IndentSegment, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i - i = consume_until(K"LineEnding", tokens, i) + i = consume_until(K"LineEnding", tokens, i) p = [K"IndentSegment", parents...] 
m = Match.MatchClosing(K"IndentSegment") children = [] @@ -84,5 +84,5 @@ function parse_norg(::IndentSegment, parents::Vector{Kind}, tokens::Vector{Token if isclosing(m) && !(matched(m) == K"IndentSegment" && consume(m)) i = prevind(tokens, i) end - AST.Node(K"IndentSegment", children, start, i) + return AST.Node(K"IndentSegment", children, start, i) end diff --git a/src/parser/link.jl b/src/parser/link.jl index 9038439..3163044 100644 --- a/src/parser/link.jl +++ b/src/parser/link.jl @@ -1,6 +1,5 @@ limit_tokens(tokens, stop) = [tokens[begin:stop]...; EOFToken()]::Vector{Token} - function parse_norg(::Link, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i i = nextind(tokens, i) @@ -85,7 +84,9 @@ function parse_norg(::URLLocation, parents::Vector{Kind}, tokens::Vector{Token}, else stop = i i = prevind(tokens, i) - AST.Node(K"URLLocation", [AST.Node(K"URLTarget", AST.Node[], start, i)], start, stop) + AST.Node( + K"URLLocation", [AST.Node(K"URLTarget", AST.Node[], start, i)], start, stop + ) end end @@ -108,11 +109,18 @@ function parse_norg(::LineNumberLocation, parents::Vector{Kind}, tokens::Vector{ else stop = i i = prevind(tokens, i) - AST.Node(K"LineNumberLocation", [AST.Node(K"LineNumberTarget", AST.Node[], start, i)], start, stop) + AST.Node( + K"LineNumberLocation", + [AST.Node(K"LineNumberTarget", AST.Node[], start, i)], + start, + stop, + ) end end -function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::Vector{Token}, i) +function parse_norg( + ::DetachedModifierLocation, parents::Vector{Kind}, tokens::Vector{Token}, i +) start = i token = tokens[i] if kind(token) == K"*" @@ -162,7 +170,7 @@ function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::V p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start_heading_title) if kind(token) == K"}" children = AST.Node[] - for (i,c) in enumerate(p.children) + for (i, c) in enumerate(p.children) append!(children, c.children) if i < lastindex(p.children) push!(children, AST.Node(K"WordNode", AST.Node[], c.stop, c.stop)) @@ -171,9 +179,14 @@ function parse_norg(::DetachedModifierLocation, parents::Vector{Kind}, tokens::V content = AST.Node(K"ParagraphSegment", children, p.start, p.stop) AST.Node(K"DetachedModifierLocation", [AST.Node(heading_kind), content], start, i) else - c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] + c = [AST.Node(K"WordNode", [], j, j) for j in start:(p.start - 1)] children = p.children - ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) + ps = AST.Node( + K"ParagraphSegment", + AST.Node[c...; children[1].children...], + start, + children[1].stop, + ) children[1] = ps AST.Node(K"None", children, start, i) end @@ -200,7 +213,7 @@ function parse_norg(::MagicLocation, parents::Vector{Kind}, tokens::Vector{Token p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start_heading_title) if kind(token) == K"}" children = AST.Node[] - for (i,c) in enumerate(p.children) + for (i, c) in enumerate(p.children) append!(children, c.children) if i < lastindex(p.children) push!(children, AST.Node(K"WordNode", [], c.stop, c.stop)) @@ -209,9 +222,14 @@ function parse_norg(::MagicLocation, parents::Vector{Kind}, tokens::Vector{Token content = AST.Node(K"ParagraphSegment", children, p.start, p.stop) AST.Node(K"MagicLocation", [content], start, i) else - c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] + c = [AST.Node(K"WordNode", [], j, j) for j in start:(p.start - 1)] children = 
p.children - ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) + ps = AST.Node( + K"ParagraphSegment", + AST.Node[c...; children[1].children...], + start, + children[1].stop, + ) children[1] = ps AST.Node(K"None", children, start, i) end @@ -219,7 +237,9 @@ end filelocationkind(::FileLocation) = K"FileLocation" filelocationkind(::NorgFileLocation) = K"NorgFileLocation" -function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i,) where { T <: Union{FileLocation, NorgFileLocation}} +function parse_norg( + t::T, parents::Vector{Kind}, tokens::Vector{Token}, i +) where {T<:Union{FileLocation,NorgFileLocation}} start = i i = nextind(tokens, i) token = tokens[i] @@ -271,17 +291,19 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i,) wher end if isclosing(m) && matched(m) != filelocationkind(t) && kind(token) != K"}" p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) - return AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) + return AST.Node( + K"None", vcat([c.children for c in p.children]...), start, i + ) end else i = AST.stop(subtarget) # subtarget = first(children(subtarget)) end end - AST.Node(filelocationkind(t), [file_target, subtarget], start, i) + return AST.Node(filelocationkind(t), [file_target, subtarget], start, i) end -function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token}, i,) +function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i i = nextind(tokens, i) token = tokens[i] @@ -304,7 +326,7 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} content = AST.Node(K"None") if kind(token) ∈ KSet"} :" children = AST.Node[] - for (i,c) in enumerate(p.children) + for (i, c) in enumerate(p.children) append!(children, c.children) if i < lastindex(p.children) push!(children, AST.Node(K"WordNode", [], c.stop, c.stop)) @@ -312,9 +334,14 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} end content = AST.Node(K"ParagraphSegment", children, p.start, p.stop) else - c = [AST.Node(K"WordNode", [], j, j) for j ∈ start:(p.start-1)] + c = [AST.Node(K"WordNode", [], j, j) for j in start:(p.start - 1)] children = p.children - ps = AST.Node(K"ParagraphSegment", AST.Node[c...;children[1].children...], start, children[1].stop) + ps = AST.Node( + K"ParagraphSegment", + AST.Node[c...; children[1].children...], + start, + children[1].stop, + ) children[1] = ps return AST.Node(K"None", children, start, i) end @@ -332,17 +359,18 @@ function parse_norg(::WikiLocation, parents::Vector{Kind}, tokens::Vector{Token} end if isclosing(m) && matched(m) != K"WikiLocation" && kind(token) != K"}" p = parse_norg(Paragraph(), parents, limit_tokens(tokens, i), start) - return AST.Node(K"None", vcat([c.children for c in p.children]...), start, i) + return AST.Node( + K"None", vcat([c.children for c in p.children]...), start, i + ) end else i = AST.stop(subtarget) end end - AST.Node(K"WikiLocation", [content, subtarget], start, i) + return AST.Node(K"WikiLocation", [content, subtarget], start, i) end - -function parse_norg(::TimestampLocation, parents::Vector{Kind}, tokens::Vector{Token}, i,) +function parse_norg(::TimestampLocation, parents::Vector{Kind}, tokens::Vector{Token}, i) start = i i = nextind(tokens, i) token = tokens[i] @@ -350,7 +378,7 @@ function parse_norg(::TimestampLocation, parents::Vector{Kind}, tokens::Vector{T i = nextind(tokens, i) token = 
tokens[i] end - start_timestamp=i + start_timestamp = i m = match_norg(parents, tokens, i) while !is_eof(token) && !isclosing(m) i = nextind(tokens, i) @@ -366,7 +394,12 @@ function parse_norg(::TimestampLocation, parents::Vector{Kind}, tokens::Vector{T else stop = i i = prevind(tokens, i) - AST.Node(K"TimestampLocation", [AST.Node(K"Timestamp", AST.Node[], start_timestamp, i)], start, stop) + AST.Node( + K"TimestampLocation", + [AST.Node(K"Timestamp", AST.Node[], start_timestamp, i)], + start, + stop, + ) end end @@ -380,7 +413,9 @@ function parse_norg(::LinkDescription, parents::Vector{Kind}, tokens::Vector{Tok if isclosing(m) break end - segment = parse_norg(ParagraphSegment(), [K"LinkDescription", parents...], tokens, i) + segment = parse_norg( + ParagraphSegment(), [K"LinkDescription", parents...], tokens, i + ) i = nextind(tokens, AST.stop(segment)) if kind(segment) == K"None" append!(children, segment.children) @@ -392,14 +427,18 @@ function parse_norg(::LinkDescription, parents::Vector{Kind}, tokens::Vector{Tok if is_eof(tokens[i]) || (isclosing(m) && matched(m) != K"LinkDescription" && matched(m) ∈ parents) || # we've been tricked in thincking we were in a link description (isclosing(m) && kind(tokens[i]) != K"]") - new_children = [parse_norg(Word(), parents, tokens, start), first(children).children...] - children[1] = AST.Node(K"ParagraphSegment", new_children, start, AST.stop(first(children))) + new_children = [ + parse_norg(Word(), parents, tokens, start), first(children).children... + ] + children[1] = AST.Node( + K"ParagraphSegment", new_children, start, AST.stop(first(children)) + ) i = prevind(tokens, i) node_kind = K"None" elseif isclosing(m) && !consume(m) i = prevind(tokens, i) end - AST.Node(node_kind, children, start, i) + return AST.Node(node_kind, children, start, i) end function parse_norg(::Anchor, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -440,7 +479,9 @@ function parse_norg(::InlineLinkTarget, parents::Vector{Kind}, tokens::Vector{To if isclosing(m) break end - segment = parse_norg(ParagraphSegment(), [K"InlineLinkTarget", parents...], tokens, i) + segment = parse_norg( + ParagraphSegment(), [K"InlineLinkTarget", parents...], tokens, i + ) i = nextind(tokens, AST.stop(segment)) if kind(segment) == K"None" append!(children, segment.children) @@ -452,12 +493,16 @@ function parse_norg(::InlineLinkTarget, parents::Vector{Kind}, tokens::Vector{To if is_eof(tokens[i]) || (isclosing(m) && matched(m) != K"InlineLinkTarget" && matched(m) ∈ parents) || # we've been tricked in thincking we were in a link description (isclosing(m) && kind(tokens[i]) != K">") - new_children = [parse_norg(Word(), parents, tokens, start), first(children).children...] - children[1] = AST.Node(K"ParagraphSegment", new_children, start, AST.stop(first(children))) + new_children = [ + parse_norg(Word(), parents, tokens, start), first(children).children... 
+ ] + children[1] = AST.Node( + K"ParagraphSegment", new_children, start, AST.stop(first(children)) + ) i = prevind(tokens, i) node_kind = K"None" elseif isclosing(m) && !consume(m) i = prevind(tokens, i) end - AST.Node(node_kind, children, start, i) + return AST.Node(node_kind, children, start, i) end diff --git a/src/parser/nestablemodifier.jl b/src/parser/nestablemodifier.jl index ef0d42b..4d58368 100644 --- a/src/parser/nestablemodifier.jl +++ b/src/parser/nestablemodifier.jl @@ -1,4 +1,6 @@ -function parse_norg(::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where {T<:Nestable} +function parse_norg( + ::T, parents::Vector{Kind}, tokens::Vector{Token}, i +) where {T<:Nestable} start = i # TODO: This is innefficient because this match has already been done at this # point, so we could transmit the information through the strategy. But this @@ -25,7 +27,7 @@ function parse_norg(::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where end push!(children, child) end - AST.Node(nestable_kind, children, start, i) + return AST.Node(nestable_kind, children, start, i) end function parse_norg(::NestableItem, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -59,13 +61,15 @@ function parse_norg(::NestableItem, parents::Vector{Kind}, tokens::Vector{Token} if to_parse == K"Verbatim" child = parse_norg(Verbatim(), [K"NestableItem", parents...], tokens, i) elseif to_parse == K"StandardRangedTag" - child = parse_norg(StandardRangedTag(), [K"NestableItem", parents...], tokens, i) + child = parse_norg( + StandardRangedTag(), [K"NestableItem", parents...], tokens, i + ) elseif is_quote(to_parse) - child = parse_norg(Quote(), [K"NestableItem", parents...], tokens, i) + child = parse_norg(Quote(), [K"NestableItem", parents...], tokens, i) elseif is_unordered_list(to_parse) - child = parse_norg(UnorderedList(), [K"NestableItem", parents...], tokens, i) + child = parse_norg(UnorderedList(), [K"NestableItem", parents...], tokens, i) elseif is_ordered_list(to_parse) - child = parse_norg(OrderedList(), [K"NestableItem", parents...], tokens, i) + child = parse_norg(OrderedList(), [K"NestableItem", parents...], tokens, i) elseif to_parse == K"Slide" child = parse_norg(Slide(), [K"NestableItem", parents...], tokens, i) elseif to_parse == K"IndentSegment" @@ -86,5 +90,5 @@ function parse_norg(::NestableItem, parents::Vector{Kind}, tokens::Vector{Token} if is_eof(tokens[i]) i = prevind(tokens, i) end - AST.Node(K"NestableItem", children, start, i) + return AST.Node(K"NestableItem", children, start, i) end diff --git a/src/parser/parser.jl b/src/parser/parser.jl index d2f5b9f..cdbc0a7 100644 --- a/src/parser/parser.jl +++ b/src/parser/parser.jl @@ -30,7 +30,9 @@ function parse_norg_toplevel_one_step(parents::Vector{Kind}, tokens::Vector{Toke m = match_norg(parents, tokens, i) to_parse = matched(m) if isclosing(m) - error("Closing token when parsing a top level element at token $(tokens[i]). This is a bug, please report it along with the text you are trying to parse.") + error( + "Closing token when parsing a top level element at token $(tokens[i]). 
This is a bug, please report it along with the text you are trying to parse.", + ) return AST.Node(K"None", AST.Node[], i, nextind(tokens, i)) elseif iscontinue(m) return AST.Node(K"None", AST.Node[], i, i) @@ -40,11 +42,11 @@ function parse_norg_toplevel_one_step(parents::Vector{Kind}, tokens::Vector{Toke stop = prevind(tokens, consume_until(K"LineEnding", tokens, i)) AST.Node(to_parse, AST.Node[], start, stop) elseif is_quote(to_parse) - parse_norg(Quote(), parents, tokens, i) + parse_norg(Quote(), parents, tokens, i) elseif is_unordered_list(to_parse) - parse_norg(UnorderedList(), parents, tokens, i) + parse_norg(UnorderedList(), parents, tokens, i) elseif is_ordered_list(to_parse) - parse_norg(OrderedList(), parents, tokens, i) + parse_norg(OrderedList(), parents, tokens, i) elseif kind(to_parse) == K"Verbatim" parse_norg(Verbatim(), parents, tokens, i) elseif kind(to_parse) == K"StandardRangedTag" @@ -92,7 +94,7 @@ function parse_norg(tokens::Vector{Token}) root = AST.Node(K"NorgDocument", children, firstindex(tokens), lastindex(tokens)) ast = AST.NorgDocument(root, tokens) findtargets!(ast) - ast + return ast end function parse_norg(::Paragraph, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -134,31 +136,33 @@ function parse_norg(::Paragraph, parents::Vector{Kind}, tokens::Vector{Token}, i elseif isclosing(m) && matched(m) != K"Paragraph" i = prevind(tokens, i) end - AST.Node(K"Paragraph", segments, start, i) + return AST.Node(K"Paragraph", segments, start, i) end """ Main dispatch utility. """ -function parse_norg_dispatch(to_parse::Kind, parents::Vector{Kind}, tokens::Vector{Token}, i) +function parse_norg_dispatch( + to_parse::Kind, parents::Vector{Kind}, tokens::Vector{Token}, i +) if to_parse == K"Escape" - parse_norg(Escape(), parents, tokens, i) + parse_norg(Escape(), parents, tokens, i) elseif to_parse == K"Bold" - parse_norg(Bold(), parents, tokens, i) + parse_norg(Bold(), parents, tokens, i) elseif to_parse == K"Italic" - parse_norg(Italic(), parents, tokens, i) + parse_norg(Italic(), parents, tokens, i) elseif to_parse == K"Underline" - parse_norg(Underline(), parents, tokens, i) + parse_norg(Underline(), parents, tokens, i) elseif to_parse == K"Strikethrough" - parse_norg(Strikethrough(), parents, tokens, i) + parse_norg(Strikethrough(), parents, tokens, i) elseif to_parse == K"Spoiler" - parse_norg(Spoiler(), parents, tokens, i) + parse_norg(Spoiler(), parents, tokens, i) elseif to_parse == K"Superscript" - parse_norg(Superscript(), parents, tokens, i) + parse_norg(Superscript(), parents, tokens, i) elseif to_parse == K"Subscript" - parse_norg(Subscript(), parents, tokens, i) + parse_norg(Subscript(), parents, tokens, i) elseif to_parse == K"InlineCode" - parse_norg(InlineCode(), parents, tokens, i) + parse_norg(InlineCode(), parents, tokens, i) elseif to_parse == K"NullModifier" parse_norg(NullModifier(), parents, tokens, i) elseif to_parse == K"InlineMath" @@ -166,21 +170,21 @@ function parse_norg_dispatch(to_parse::Kind, parents::Vector{Kind}, tokens::Vect elseif to_parse == K"Variable" parse_norg(Variable(), parents, tokens, i) elseif to_parse == K"FreeFormBold" - parse_norg(FreeFormBold(), parents, tokens, i) + parse_norg(FreeFormBold(), parents, tokens, i) elseif to_parse == K"FreeFormItalic" - parse_norg(FreeFormItalic(), parents, tokens, i) + parse_norg(FreeFormItalic(), parents, tokens, i) elseif to_parse == K"FreeFormUnderline" - parse_norg(FreeFormUnderline(), parents, tokens, i) + parse_norg(FreeFormUnderline(), parents, tokens, i) elseif to_parse == 
K"FreeFormStrikethrough" - parse_norg(FreeFormStrikethrough(), parents, tokens, i) + parse_norg(FreeFormStrikethrough(), parents, tokens, i) elseif to_parse == K"FreeFormSpoiler" - parse_norg(FreeFormSpoiler(), parents, tokens, i) + parse_norg(FreeFormSpoiler(), parents, tokens, i) elseif to_parse == K"FreeFormSuperscript" - parse_norg(FreeFormSuperscript(), parents, tokens, i) + parse_norg(FreeFormSuperscript(), parents, tokens, i) elseif to_parse == K"FreeFormSubscript" - parse_norg(FreeFormSubscript(), parents, tokens, i) + parse_norg(FreeFormSubscript(), parents, tokens, i) elseif to_parse == K"FreeFormInlineCode" - parse_norg(FreeFormInlineCode(), parents, tokens, i) + parse_norg(FreeFormInlineCode(), parents, tokens, i) elseif to_parse == K"FreeFormNullModifier" parse_norg(FreeFormNullModifier(), parents, tokens, i) elseif to_parse == K"FreeFormInlineMath" @@ -188,7 +192,7 @@ function parse_norg_dispatch(to_parse::Kind, parents::Vector{Kind}, tokens::Vect elseif to_parse == K"FreeFormVariable" parse_norg(FreeFormVariable(), parents, tokens, i) elseif to_parse == K"Link" - parse_norg(Link(), parents, tokens, i) + parse_norg(Link(), parents, tokens, i) elseif to_parse == K"Anchor" parse_norg(Anchor(), parents, tokens, i) elseif to_parse == K"InlineLinkTarget" @@ -196,7 +200,9 @@ function parse_norg_dispatch(to_parse::Kind, parents::Vector{Kind}, tokens::Vect elseif to_parse == K"Word" parse_norg(Word(), parents, tokens, i) else - error("parse_norg_dispatch got an unhandled node kind $to_parse for token $(tokens[i])") + error( + "parse_norg_dispatch got an unhandled node kind $to_parse for token $(tokens[i])", + ) end end @@ -243,7 +249,9 @@ function parse_norg(::ParagraphSegment, parents::Vector{Kind}, tokens::Vector{To elseif AST.start(first(siblings)) == start AST.Node(K"None", siblings, start, i) else - ps = AST.Node(K"ParagraphSegment", vcat(children, first(siblings).children), start, i) + ps = AST.Node( + K"ParagraphSegment", vcat(children, first(siblings).children), start, i + ) if length(siblings) > 1 AST.Node(K"None", [ps, siblings[2:end]...], start, i) else @@ -255,11 +263,11 @@ end function parse_norg(::Escape, parents::Vector{Kind}, tokens::Vector{Token}, i) next_i = nextind(tokens, i) w = parse_norg(Word(), parents, tokens, next_i) - AST.Node(K"Escape", AST.Node[w], i, next_i) + return AST.Node(K"Escape", AST.Node[w], i, next_i) end function parse_norg(::Word, parents::Vector{Kind}, tokens::Vector{Token}, i) - AST.Node(K"WordNode", AST.Node[], i, i) + return AST.Node(K"WordNode", AST.Node[], i, i) end include("attachedmodifier.jl") diff --git a/src/parser/rangeabledetachedmodifier.jl b/src/parser/rangeabledetachedmodifier.jl index c1f9a6a..c56b3dd 100644 --- a/src/parser/rangeabledetachedmodifier.jl +++ b/src/parser/rangeabledetachedmodifier.jl @@ -1,6 +1,8 @@ strategy_to_kind(::Definition) = K"Definition" strategy_to_kind(::Footnote) = K"Footnote" -function parse_norg(t::RangeableDetachedModifier, parents::Vector{Kind}, tokens::Vector{Token}, i) +function parse_norg( + t::RangeableDetachedModifier, parents::Vector{Kind}, tokens::Vector{Token}, i +) start = i parents = [strategy_to_kind(t), parents...] 
children = AST.Node[] @@ -13,7 +15,9 @@ function parse_norg(t::RangeableDetachedModifier, parents::Vector{Kind}, tokens: stop = prevind(tokens, consume_until(K"LineEnding", tokens, i)) if !isempty(children) child = last(children) - children[end] = AST.Node(K"RangeableItem", child.children, AST.start(child), stop) + children[end] = AST.Node( + K"RangeableItem", child.children, AST.start(child), stop + ) end i = stop end @@ -34,7 +38,7 @@ function parse_norg(t::RangeableDetachedModifier, parents::Vector{Kind}, tokens: push!(children, child) end - AST.Node(strategy_to_kind(t), children, start, i) + return AST.Node(strategy_to_kind(t), children, start, i) end function parse_norg(::RangeableItem, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -57,9 +61,11 @@ end function parse_norg_unranged_rangeable(parents, tokens, i) title_segment = parse_norg(ParagraphSegment(), parents, tokens, i) - paragraph = parse_norg(Paragraph(), parents, tokens, nextind(tokens, AST.stop(title_segment))) + paragraph = parse_norg( + Paragraph(), parents, tokens, nextind(tokens, AST.stop(title_segment)) + ) - AST.Node(K"RangeableItem", [title_segment, paragraph], i, AST.stop(paragraph)) + return AST.Node(K"RangeableItem", [title_segment, paragraph], i, AST.stop(paragraph)) end function parse_norg_ranged_rangeable(parents, tokens, i) @@ -70,7 +76,7 @@ function parse_norg_ranged_rangeable(parents, tokens, i) token = tokens[i] while !is_eof(token) m = match_norg(parents, tokens, i) - if isclosing(m) + if isclosing(m) if consume(m) i = consume_until(K"LineEnding", tokens, i) i = prevind(tokens, i) @@ -97,7 +103,7 @@ function parse_norg_ranged_rangeable(parents, tokens, i) else parse_norg(Paragraph(), parents, tokens, i) end - push!(children, child) + push!(children, child) i = nextind(tokens, AST.stop(child)) token = tokens[i] if is_eof(token) @@ -105,5 +111,7 @@ function parse_norg_ranged_rangeable(parents, tokens, i) token = tokens[i] end end - AST.Node(K"RangeableItem", [title_segment, children...]::Vector{AST.Node}, start, i) + return AST.Node( + K"RangeableItem", [title_segment, children...]::Vector{AST.Node}, start, i + ) end diff --git a/src/parser/structuralmodifier.jl b/src/parser/structuralmodifier.jl index 41c1722..e033439 100644 --- a/src/parser/structuralmodifier.jl +++ b/src/parser/structuralmodifier.jl @@ -25,7 +25,9 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) i = consume_until(K"Whitespace", tokens, i) end end - title_segment = parse_norg(ParagraphSegment(), [heading_kind, parents...], tokens, i) + title_segment = parse_norg( + ParagraphSegment(), [heading_kind, parents...], tokens, i + ) push!(children, title_segment) i = nextind(tokens, AST.stop(title_segment)) while !is_eof(tokens[i]) @@ -39,7 +41,9 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) elseif to_parse == K"WeakDelimitingModifier" start_del = i i = consume_until(K"LineEnding", tokens, i) - push!(children, AST.Node(K"WeakDelimitingModifier", AST.Node[], start_del, i)) + push!( + children, AST.Node(K"WeakDelimitingModifier", AST.Node[], start_del, i) + ) break elseif kind(to_parse) == K"StrongDelimitingModifier" i = prevind(tokens, i) @@ -57,7 +61,9 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) elseif kind(to_parse) == K"Verbatim" child = parse_norg(Verbatim(), [heading_kind, parents...], tokens, i) elseif kind(to_parse) == K"StandardRangedTag" - child = parse_norg(StandardRangedTag(), [heading_kind, parents...], tokens, i) + child = 
parse_norg( + StandardRangedTag(), [heading_kind, parents...], tokens, i + ) elseif to_parse == K"WeakCarryoverTag" child = parse_norg(WeakCarryoverTag(), parents, tokens, i) elseif to_parse == K"Definition" @@ -80,11 +86,11 @@ function parse_norg(::Heading, parents::Vector{Kind}, tokens::Vector{Token}, i) if isclosing(m) && !(matched(m) == heading_kind && consume(m)) i = prevind(tokens, i) end - + AST.Node(heading_kind, children, start, i) else # if the stars are not followed by a whitespace # This should never happen if matching works correctly - # parse_norg(Paragraph(), parents, tokens, i) + # parse_norg(Paragraph(), parents, tokens, i) error("Matching for headings has a bug. Please report the issue.") end end diff --git a/src/parser/tag.jl b/src/parser/tag.jl index 59f1240..85005ce 100644 --- a/src/parser/tag.jl +++ b/src/parser/tag.jl @@ -28,7 +28,10 @@ function parse_tag_header(parents::Vector{Kind}, tokens::Vector{Token}, i) start_current = i while !is_eof(tokens[i]) && kind(token) != K"LineEnding" if is_whitespace(token) - push!(children, AST.Node(K"TagParameter", AST.Node[], start_current, prevind(tokens, i))) + push!( + children, + AST.Node(K"TagParameter", AST.Node[], start_current, prevind(tokens, i)), + ) i = nextind(tokens, i) start_current = i token = tokens[i] @@ -43,12 +46,15 @@ function parse_tag_header(parents::Vector{Kind}, tokens::Vector{Token}, i) end if kind(token) == K"LineEnding" if start_current < i - push!(children, AST.Node(K"TagParameter", AST.Node[], start_current, prevind(tokens, i))) + push!( + children, + AST.Node(K"TagParameter", AST.Node[], start_current, prevind(tokens, i)), + ) end i = nextind(tokens, i) token = tokens[i] end - children, i + return children, i end tag(::Verbatim) = K"Verbatim" @@ -56,7 +62,7 @@ tag(::StandardRangedTag) = K"StandardRangedTag" body(::Verbatim) = K"VerbatimBody" body(::StandardRangedTag) = K"StandardRangedTagBody" -function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where {T <: Tag} +function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where {T<:Tag} start = i children, i = parse_tag_header(parents, tokens, i) token = tokens[i] @@ -85,7 +91,7 @@ function parse_norg(t::T, parents::Vector{Kind}, tokens::Vector{Token}, i) where i = nextind(tokens, AST.stop(c)) end push!(children, AST.Node(body(t), body_children, start_content, stop_content)) - AST.Node(tag(t), children, start, i) + return AST.Node(tag(t), children, start, i) end function parse_norg(::WeakCarryoverTag, parents::Vector{Kind}, tokens::Vector{Token}, i) @@ -95,7 +101,12 @@ function parse_norg(::WeakCarryoverTag, parents::Vector{Kind}, tokens::Vector{To if kind(content) == K"Paragraph" || is_nestable(kind(content)) content_children = content.children first_segment = first(content_children) - content_children[1] = AST.Node(K"WeakCarryoverTag", [children..., first_segment], start, AST.stop(first_segment)) + content_children[1] = AST.Node( + K"WeakCarryoverTag", + [children..., first_segment], + start, + AST.stop(first_segment), + ) AST.Node(kind(content), content_children, AST.start(content), AST.stop(content)) else AST.Node(K"WeakCarryoverTag", [children..., content], start, AST.stop(content)) diff --git a/src/parser/verbatim.jl b/src/parser/verbatim.jl index e69de29..8b13789 100644 --- a/src/parser/verbatim.jl +++ b/src/parser/verbatim.jl @@ -0,0 +1 @@ + diff --git a/src/scanners.jl b/src/scanners.jl index 6c88670..feb173b 100644 --- a/src/scanners.jl +++ b/src/scanners.jl @@ -16,7 +16,12 @@ matching token was found. 
struct ScanResult length::Int64 end -ScanResult(res::Bool) = if res ScanResult(1) else ScanResult(0) end +ScanResult(res::Bool) = + if res + ScanResult(1) + else + ScanResult(0) + end success(scanresult::ScanResult) = scanresult.length > 0 abstract type ScanStrategy end @@ -60,7 +65,7 @@ function scan(list::AbstractArray, input) break end end - res + return res end function scan(set::Set{Char}, input) @@ -81,7 +86,7 @@ function scan(::Whitespace, input) break end end - ScanResult(trial_stop) + return ScanResult(trial_stop) end function scan(::Word, input) @@ -97,7 +102,7 @@ function scan(::Word, input) break end end - ScanResult(trial_stop) + return ScanResult(trial_stop) end scan(::LineEnding, input) = scan(NORG_LINE_ENDING, input) @@ -123,8 +128,12 @@ end All the registered [`Kinds.Kind`](@ref) that [`Scanners.scan`](@ref) will try when consuming entries. """ const TOKENKIND_PARSING_ORDER = [ - Kinds.all_single_punctuation_tokens()...; - K"x"; K"LineEnding"; K"Whitespace"; K"Punctuation"; K"Word" + Kinds.all_single_punctuation_tokens()... + K"x" + K"LineEnding" + K"Whitespace" + K"Punctuation" + K"Word" ] """ @@ -148,7 +157,7 @@ function scan(input; line=0, charnum=0) if !success(res) error("No suitable token found for input at line $line, char $charnum") end - Token(tokentype, line, charnum, input[1:res.length]) + return Token(tokentype, line, charnum, input[1:(res.length)]) end export scan diff --git a/src/semantics/timestamps.jl b/src/semantics/timestamps.jl index de0d6fc..d3b0304 100644 --- a/src/semantics/timestamps.jl +++ b/src/semantics/timestamps.jl @@ -17,7 +17,7 @@ Example usage: """ function parse_norg_timestamp(tokens, start, stop) i, t1 = parse_one_norg_timestamp(tokens, start, stop) - if kind(tokens[i]) == K"-" || (i <= stop && kind(tokens[i+1]) == K"-") + if kind(tokens[i]) == K"-" || (i <= stop && kind(tokens[i + 1]) == K"-") if kind(tokens[i]) != K"-" i += 1 end @@ -28,11 +28,10 @@ function parse_norg_timestamp(tokens, start, stop) i, t2 = parse_one_norg_timestamp(tokens, i, stop) t1, t2 = complete_timestamps(t1, t2) t1, t2 = to_datetime(t1), to_datetime(t2) - (;t1, t2) + (; t1, t2) else (t1=to_datetime(t1), t2=nothing) end - end function to_datetime(t) @@ -43,11 +42,11 @@ function to_datetime(t) end stop = findfirst(isnothing.(args)) if !isnothing(stop) - args = args[1:stop-1] + args = args[1:(stop - 1)] end if isempty(args) return nothing - end + end dt = DateTime(args...) 
if isnothing(t.timezone) dt @@ -74,7 +73,7 @@ function complete_timestamps(t1, t2) dt2[field] = f1 end end - (t1=NamedTuple(dt1), t2=NamedTuple(dt2)) + return (t1=NamedTuple(dt1), t2=NamedTuple(dt2)) end function warn_if_no_separator(param, tokens, i, stop) @@ -85,7 +84,9 @@ function warn_if_no_separator(param, tokens, i, stop) end function parse_one_norg_timestamp_should_return(tokens, i, stop) - i >= stop || kind(tokens[i]) == K"-" || (i <= stop && kind(tokens[i+1]) == K"-") + return i >= stop || + kind(tokens[i]) == K"-" || + (i <= stop && kind(tokens[i + 1]) == K"-") end function parse_one_norg_timestamp(tokens, start, stop) @@ -104,13 +105,13 @@ function parse_one_norg_timestamp(tokens, start, stop) i = nextind(tokens, i) token = tokens[i] if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) elseif kind(token) == K"," i = nextind(tokens, i) token = tokens[i] end if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) else warn_if_no_separator("Day", tokens, i, stop) i = nextind(tokens, i) @@ -124,7 +125,7 @@ function parse_one_norg_timestamp(tokens, start, stop) i = nextind(tokens, i) token = tokens[i] if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) else warn_if_no_separator("Day of the month", tokens, i, stop) i = nextind(tokens, i) @@ -137,7 +138,7 @@ function parse_one_norg_timestamp(tokens, start, stop) i = nextind(tokens, i) token = tokens[i] if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) else warn_if_no_separator("Month", tokens, i, stop) i = nextind(tokens, i) @@ -150,7 +151,7 @@ function parse_one_norg_timestamp(tokens, start, stop) i = nextind(tokens, i) token = tokens[i] if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) else warn_if_no_separator("Year", tokens, i, stop) i = nextind(tokens, i) @@ -162,9 +163,9 @@ function parse_one_norg_timestamp(tokens, start, stop) s = join(value.(tokens[i:next_space])) time = tryparse(Time, s, dateformat"HH:MM.SS") if !isnothing(time) - i = next_space+1 + i = next_space + 1 if parse_one_norg_timestamp_should_return(tokens, i, stop) - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) else warn_if_no_separator("Time", tokens, i, stop) i = nextind(tokens, i) @@ -173,14 +174,14 @@ function parse_one_norg_timestamp(tokens, start, stop) end end if i <= stop - stop_timestamp = Parser.consume_until(KSet"Whitespace -", tokens, i)-2 + stop_timestamp = Parser.consume_until(KSet"Whitespace -", tokens, i) - 2 if stop_timestamp <= stop w = join(value.(tokens[i:stop_timestamp])) timezone = parse_timezone(w) i = stop_timestamp + 1 end end - return i, (;day, day_of_month, month, year, time, timezone) + return i, (; day, day_of_month, month, year, time, timezone) end function parse_day(tokens, start, _) @@ -230,7 +231,7 @@ end function parse_year(tokens, start, _) token = tokens[start] w = value(token) - tryparse(Int64, w) + return 
tryparse(Int64, w) end function parse_timezone(w) @@ -240,4 +241,3 @@ function parse_timezone(w) nothing end end - diff --git a/src/strategy.jl b/src/strategy.jl index b3cfa9f..cb55fc9 100644 --- a/src/strategy.jl +++ b/src/strategy.jl @@ -78,12 +78,17 @@ struct FreeFormInlineMath <: VerbatimAttachedModifierStrategy end struct FreeFormVariable <: VerbatimAttachedModifierStrategy end const FreeFormAttachedModifier = Union{ - FreeFormBold, FreeFormItalic, - FreeFormUnderline, FreeFormStrikethrough, - FreeFormSpoiler, FreeFormSuperscript, - FreeFormSubscript, FreeFormNullModifier, - FreeFormInlineCode, FreeFormInlineMath, - FreeFormVariable + FreeFormBold, + FreeFormItalic, + FreeFormUnderline, + FreeFormStrikethrough, + FreeFormSpoiler, + FreeFormSuperscript, + FreeFormSubscript, + FreeFormNullModifier, + FreeFormInlineCode, + FreeFormInlineMath, + FreeFormVariable, } struct Anchor <: FromNode end @@ -140,23 +145,23 @@ export Heading, HeadingTitle, DelimitingModifier, StrongDelimiter export WeakDelimiter, HorizontalRule, Nestable, UnorderedList, OrderedList export Quote, NestableItem export Tag, Verbatim, WeakCarryoverTag, StrongCarryoverTag, StandardRangedTag -export AttachedModifierStrategy, VerbatimAttachedModifierStrategy, Bold, Italic -export Underline, Strikethrough, Spoiler, Superscript, Subscript, InlineCode +export AttachedModifierStrategy, VerbatimAttachedModifierStrategy, Bold, Italic +export Underline, Strikethrough, Spoiler, Superscript, Subscript, InlineCode export NullModifier, InlineMath, Variable export FreeFormBold, FreeFormItalic, FreeFormUnderline, FreeFormStrikethrough export FreeFormSpoiler, FreeFormSuperscript, FreeFormSubscript, FreeFormInlineCode export FreeFormNullModifier, FreeFormInlineMath, FreeFormVariable, FreeFormAttachedModifier -export Anchor, Link, LinkLocation, URLLocation, LineNumberLocation -export DetachedModifierLocation, FileLocation, MagicLocation, NorgFileLocation -export WikiLocation, TimestampLocation, LinkDescription, LinkSubTarget, InlineLinkTarget +export Anchor, Link, LinkLocation, URLLocation, LineNumberLocation +export DetachedModifierLocation, FileLocation, MagicLocation, NorgFileLocation +export WikiLocation, TimestampLocation, LinkDescription, LinkSubTarget, InlineLinkTarget export RangeableDetachedModifier, Definition, Footnote, RangeableItem export DetachedModifierSuffix, Slide, IndentSegment export DetachedModifierExtension -export TodoExtension, TimestampExtension, PriorityExtension, DueDateExtension, StartDateExtension +export TodoExtension, + TimestampExtension, PriorityExtension, DueDateExtension, StartDateExtension export StatusUndone, StatusDone, StatusNeedFurtherInput, StatusUrgent export StatusRecurring, StatusInProgress, StatusOnHold, StatusCancelled export ParagraphSegment, Paragraph, Escape end - diff --git a/src/tokenize.jl b/src/tokenize.jl index b3ca054..c07a3f1 100644 --- a/src/tokenize.jl +++ b/src/tokenize.jl @@ -19,7 +19,7 @@ function tokenize(input::AbstractString) result = [SOFToken()] while i <= lastindex(input) sub = SubString(input, i) - token = Scanners.scan(sub, line = linenum, charnum = charnum) + token = Scanners.scan(sub; line=linenum, charnum=charnum) if is_line_ending(token) linenum += 1 charnum = 1 @@ -30,7 +30,7 @@ function tokenize(input::AbstractString) i = nextind(input, i, length(token)) end push!(result, EOFToken()) - result + return result end export tokenize diff --git a/src/tokens.jl b/src/tokens.jl index 357e319..1240d54 100644 --- a/src/tokens.jl +++ b/src/tokens.jl @@ -45,11 +45,13 @@ end 
Create a `Token` of kind `kind` with value `value` at `line` and char number `char`. """ function Token(kind, line, char, value) - Token(kind, TokenPosition(line, char), value) + return Token(kind, TokenPosition(line, char), value) end function Base.show(io::IO, token::Token) - print(io, - "Token(K\"$(string(kind(token)))\", \"$(value(token))\", line $(string(line(token))), col. $(string(char(token))))") + return print( + io, + "Token(K\"$(string(kind(token)))\", \"$(value(token))\", line $(string(line(token))), col. $(string(char(token))))", + ) end SOFToken() = Token(K"StartOfFile", 0, 0, SubString("")) EOFToken() = Token(K"EndOfFile", 0, 0, SubString("")) diff --git a/src/utils.jl b/src/utils.jl index d8c6b6e..bf130c3 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -13,7 +13,7 @@ function consume_until(k::Kind, tokens::Vector{Token}, i) if kind(token) == k i = nextind(tokens, i) end - i + return i end function consume_until(k, tokens::Vector{Token}, i) token = tokens[i] @@ -24,7 +24,7 @@ function consume_until(k, tokens::Vector{Token}, i) if kind(token) ∈ k i = nextind(tokens, i) end - i + return i end """ @@ -34,7 +34,7 @@ Make some text suitable for using it as an id in a document. """ function idify(text) words = map(lowercase, split(text, r"\W+")) - join(filter(!isempty, words), '-') + return join(filter(!isempty, words), '-') end """ @@ -58,30 +58,18 @@ Return all children and grandchildren of kind `k`. It can also `exclude` certain nodes from recursion. """ function getchildren(node::Node, k::Kind) - filter( - x->kind(x)==k, - collect(PreOrderDFS( - x->kind(x)!=k, - node - )) - ) + return filter(x -> kind(x) == k, collect(PreOrderDFS(x -> kind(x) != k, node))) end function getchildren(node::Node, k::Kind, exclude::Kind) - filter( - x->kind(x)==k, - collect(PreOrderDFS( - x->kind(x)!=k && kind(x)!=exclude, - node - )) + return filter( + x -> kind(x) == k, + collect(PreOrderDFS(x -> kind(x) != k && kind(x) != exclude, node)), ) end function getchildren(node::Node, k::Kind, exclude) - filter( - x->kind(x)==k, - collect(PreOrderDFS( - x->kind(x)!=k && kind(x)∉exclude, - node - )) + return filter( + x -> kind(x) == k, + collect(PreOrderDFS(x -> kind(x) != k && kind(x) ∉ exclude, node)), ) end @@ -107,10 +95,10 @@ function findtargets!(ast::NorgDocument) end function findtargets!(ast::NorgDocument, node::Node) if AST.is_heading(node) - push!(ast.targets, textify(ast, first(children(node)))=>(kind(node), Ref(node))) + push!(ast.targets, textify(ast, first(children(node))) => (kind(node), Ref(node))) elseif kind(node) ∈ KSet"Definition Footnote" for c in children(node) - push!(ast.targets, textify(ast, first(children(c)))=>(kind(node), Ref(c))) + push!(ast.targets, textify(ast, first(children(c))) => (kind(node), Ref(c))) end end end diff --git a/test/ast_tests/test_detached_modifier_extension.jl b/test/ast_tests/test_detached_modifier_extension.jl index c94cca7..7997164 100644 --- a/test/ast_tests/test_detached_modifier_extension.jl +++ b/test/ast_tests/test_detached_modifier_extension.jl @@ -14,9 +14,9 @@ todos = [ ("_", K"StatusCancelled") ] -@testset "Extension on detached modifier '$m'." for m in detached_modifier +@testset "Extension on detached modifier '$m'." 
for m in detached_modifier @testset "Level" for n in 1:7 - @testset "Simple Todos: ($t)" for (t,res) in todos + @testset "Simple Todos: ($t)" for (t, res) in todos s = "$(repeat(m, n)) ($t) hey" ast = norg(s) nestable = first(children(ast.root)) @@ -64,7 +64,7 @@ todos = [ ext, p = children(item) @test kind(ext) == K"TimestampExtension" end - @testset "Todos chained with timestamp: ($t)" for (t,res) in todos + @testset "Todos chained with timestamp: ($t)" for (t, res) in todos s = "$(repeat(m, n)) ($t|@ Tuesday) hey" ast = norg(s) nestable = first(children(ast.root)) @@ -78,7 +78,7 @@ todos = [ ts_ext = last(children(ext)) @test kind(ts_ext) == K"TimestampExtension" end - @testset "Todos chained with due date: ($t)" for (t,res) in todos + @testset "Todos chained with due date: ($t)" for (t, res) in todos s = "$(repeat(m, n)) ($t|< Tuesday) hey" ast = norg(s) nestable = first(children(ast.root)) @@ -92,7 +92,7 @@ todos = [ ts_ext = last(children(ext)) @test kind(ts_ext) == K"DueDateExtension" end - @testset "Todos chained with start date: ($t)" for (t,res) in todos + @testset "Todos chained with start date: ($t)" for (t, res) in todos s = "$(repeat(m, n)) ($t|> Tuesday) hey" ast = norg(s) nestable = first(children(ast.root)) @@ -106,7 +106,7 @@ todos = [ ts_ext = last(children(ext)) @test kind(ts_ext) == K"StartDateExtension" end - @testset "Todos chained with priority: ($t)" for (t,res) in todos + @testset "Todos chained with priority: ($t)" for (t, res) in todos s = "$(repeat(m, n)) ($t|# A) hey" ast = norg(s) nestable = first(children(ast.root)) @@ -122,4 +122,3 @@ todos = [ end end end - diff --git a/test/ast_tests/test_detached_modifier_suffix.jl b/test/ast_tests/test_detached_modifier_suffix.jl index 552a447..d541062 100644 --- a/test/ast_tests/test_detached_modifier_suffix.jl +++ b/test/ast_tests/test_detached_modifier_suffix.jl @@ -3,39 +3,65 @@ AST = Norg.AST textify = Norg.Codegen.textify slide_children = [ - (K"Definition", """\$ Single definition - Hello world""") - (K"Definition", """\$\$ Longer definition - Hello - It's me - \$\$""") - (K"Definition", """\$ Grouped definition - hey - \$ Another one - ho""") - (K"Footnote", """^ Single footnote - Hello world""") - (K"Footnote", """^^ Longer footnote - Hello - It's me - ^^""") - (K"Footnote", """^ Grouped footnote - hey - ^ Another one - ho""") - (K"Verbatim", """@verb foo - This is some very cody code. - @end""") - (K"Paragraph", """I'm a simple paragraph. - Pretty unimpressive eh?""") + ( + K"Definition", + """\$ Single definition +Hello world""", + ) + ( + K"Definition", + """\$\$ Longer definition +Hello +It's me +\$\$""", + ) + ( + K"Definition", + """\$ Grouped definition +hey +\$ Another one +ho""", + ) + ( + K"Footnote", + """^ Single footnote +Hello world""", + ) + ( + K"Footnote", + """^^ Longer footnote +Hello +It's me +^^""", + ) + ( + K"Footnote", + """^ Grouped footnote +hey +^ Another one +ho""", + ) + ( + K"Verbatim", + """@verb foo +This is some very cody code. +@end""", + ) + ( + K"Paragraph", + """I'm a simple paragraph. 
+Pretty unimpressive eh?""", + ) ] -nestable = [('-', K"UnorderedList1") - ('~', K"OrderedList1") - ('>', K"Quote1")] +nestable = [ + ('-', K"UnorderedList1") + ('~', K"OrderedList1") + ('>', K"Quote1") +] @testset "Slide can have $(child_T) children" for (child_T, child_text) in slide_children - for (m,nestable_T) in nestable + for (m, nestable_T) in nestable s = """$m First line $m : $(child_text) @@ -43,7 +69,7 @@ nestable = [('-', K"UnorderedList1") ast = norg(s) nest = first(children(ast.root)) @test kind(nest) == nestable_T - i1,i2,i3 = children(nest) + i1, i2, i3 = children(nest) @test kind(i1) == K"NestableItem" @test kind(i2) == K"NestableItem" @test kind(i3) == K"NestableItem" @@ -72,7 +98,6 @@ end @test kind(p2) == K"Paragraph" @test textify(ast, p1) == "This is a paragraph." @test textify(ast, p2) == "This is another paragraph inside of the same list item." - end @testset "Delimiter precendence in indent segment" begin ast = norg"""* Heading @@ -92,7 +117,7 @@ end @test kind(li) == K"NestableItem" is = first(children(li)) @test kind(is) == K"IndentSegment" - p,wd = children(is) + p, wd = children(is) @test kind(wd) == K"WeakDelimitingModifier" @test kind(p) == K"Paragraph" @test textify(ast, is) == "Text" @@ -113,21 +138,21 @@ end This is not a part of any indent segment. """ - ul,sd,p = children(ast.root) + ul, sd, p = children(ast.root) @test kind(ul) == K"UnorderedList1" @test kind(sd) == K"StrongDelimitingModifier" @test kind(p) == K"Paragraph" @test textify(ast, p) == "This is not a part of any indent segment." is = first(children(first(children(ul)))) @test kind(is) == K"IndentSegment" - p1,p2,ul = children(is) + p1, p2, ul = children(is) @test kind(p1) == K"Paragraph" @test kind(p2) == K"Paragraph" @test textify(ast, p1) == "This is an indent segment." @test textify(ast, p2) == "This paragraph should also belong to the indent segment." @test kind(ul) == K"UnorderedList2" is = first(children(first(children(ul)))) - p,verb = children(is) + p, verb = children(is) @test kind(p) == K"Paragraph" @test kind(verb) == K"Verbatim" end diff --git a/test/ast_tests/test_headings.jl b/test/ast_tests/test_headings.jl index c38e0b7..1513cba 100644 --- a/test/ast_tests/test_headings.jl +++ b/test/ast_tests/test_headings.jl @@ -11,7 +11,7 @@ Node = Norg.AST.Node ast = norg(s) - h1,sd,p = children(ast.root) + h1, sd, p = children(ast.root) @test kind(h1) == K"Heading1" @test kind(p) == K"Paragraph" @@ -164,7 +164,7 @@ end $m$m$m There """) - p1,delim,p2 = children(ast.root) + p1, delim, p2 = children(ast.root) @test Norg.Codegen.textify(ast, p1) == "Hello" @test Norg.Codegen.textify(ast, p2) == "There" end diff --git a/test/ast_tests/test_links.jl b/test/ast_tests/test_links.jl index dc57500..2500a7a 100644 --- a/test/ast_tests/test_links.jl +++ b/test/ast_tests/test_links.jl @@ -1,21 +1,22 @@ Node = Norg.AST.Node AST = Norg.AST -simple_link_tests = [":norg_file:" => K"NorgFileLocation" - "* heading" => K"DetachedModifierLocation" - "** heading" => K"DetachedModifierLocation" - "*** heading" => K"DetachedModifierLocation" - "**** heading" => K"DetachedModifierLocation" - "***** heading" => K"DetachedModifierLocation" - "****** heading" => K"DetachedModifierLocation" - "******* heading" => K"DetachedModifierLocation" - "# magic" => K"MagicLocation" - "42" => K"LineNumberLocation" - "https://example.org" => K"URLLocation" - "file://example.txt" => K"URLLocation" - "/ example.txt" => K"FileLocation" - "? 
test" => K"WikiLocation" - "@ Wednesday" => K"TimestampLocation" +simple_link_tests = [ + ":norg_file:" => K"NorgFileLocation" + "* heading" => K"DetachedModifierLocation" + "** heading" => K"DetachedModifierLocation" + "*** heading" => K"DetachedModifierLocation" + "**** heading" => K"DetachedModifierLocation" + "***** heading" => K"DetachedModifierLocation" + "****** heading" => K"DetachedModifierLocation" + "******* heading" => K"DetachedModifierLocation" + "# magic" => K"MagicLocation" + "42" => K"LineNumberLocation" + "https://example.org" => K"URLLocation" + "file://example.txt" => K"URLLocation" + "/ example.txt" => K"FileLocation" + "? test" => K"WikiLocation" + "@ Wednesday" => K"TimestampLocation" ] @testset "basic links: $target" for (link, target) in simple_link_tests @@ -28,7 +29,7 @@ simple_link_tests = [":norg_file:" => K"NorgFileLocation" @test kind(l) == K"Link" @test kind(loc) == target @test kind(space) == K"WordNode" - @test join(value.(ast.tokens[space.start:space.stop])) == " " + @test join(value.(ast.tokens[(space.start):(space.stop)])) == " " end @testset "basic links with description: $target" for (link, target) in simple_link_tests @@ -44,10 +45,11 @@ end @test kind(descr) == K"LinkDescription" descr_ps = first(children(descr)) descr_word = first(children(descr_ps)) - @test join(value.(ast.tokens[descr_word.start:descr_word.stop])) == "descr" + @test join(value.(ast.tokens[(descr_word.start):(descr_word.stop)])) == "descr" end -@testset "Checking markup in link description :$link => $target" for (link, target) in simple_link_tests +@testset "Checking markup in link description :$link => $target" for (link, target) in + simple_link_tests s = "{$link}[*descr*]" ast = norg(s) p = first(children(ast.root)) @@ -83,17 +85,18 @@ end @test kind(target) == K"FileNorgRootTarget" end -subtarget_tests = [":file:1" => K"LineNumberLocation" - ":file:* heading" => K"DetachedModifierLocation" - ":file:** heading" => K"DetachedModifierLocation" - ":file:*** heading" => K"DetachedModifierLocation" - ":file:**** heading" => K"DetachedModifierLocation" - ":file:***** heading" => K"DetachedModifierLocation" - ":file:****** heading" => K"DetachedModifierLocation" - ":file:******* heading" => K"DetachedModifierLocation" - ":file:# magic" => K"MagicLocation" - "/ file.txt:1" => K"LineNumberLocation" - "? test:file:" => K"NorgFileLocation" +subtarget_tests = [ + ":file:1" => K"LineNumberLocation" + ":file:* heading" => K"DetachedModifierLocation" + ":file:** heading" => K"DetachedModifierLocation" + ":file:*** heading" => K"DetachedModifierLocation" + ":file:**** heading" => K"DetachedModifierLocation" + ":file:***** heading" => K"DetachedModifierLocation" + ":file:****** heading" => K"DetachedModifierLocation" + ":file:******* heading" => K"DetachedModifierLocation" + ":file:# magic" => K"MagicLocation" + "/ file.txt:1" => K"LineNumberLocation" + "? 
test:file:" => K"NorgFileLocation" ] @testset "Checking subtarget :$link => $target" for (link, target) in subtarget_tests s = "{$link}" @@ -111,9 +114,9 @@ leaves_tests = [ "** heading" => [K"Heading2", K"WordNode"] "*** heading" => [K"Heading3", K"WordNode"] "**** heading" => [K"Heading4", K"WordNode"] - "***** heading" =>[K"Heading5", K"WordNode"] - "****** heading" =>[K"Heading6", K"WordNode"] - "******* heading" =>[K"Heading6", K"WordNode"] + "***** heading" => [K"Heading5", K"WordNode"] + "****** heading" => [K"Heading6", K"WordNode"] + "******* heading" => [K"Heading6", K"WordNode"] "# magic" => [K"WordNode"] "42" => [K"LineNumberTarget"] "https://example.org" => [K"URLTarget"] @@ -124,7 +127,7 @@ leaves_tests = [ @testset "Checking leaves :$link => $target" for (link, target) in leaves_tests s = "{$link}" ast = norg(s) - for (l,t) in zip(collect(Leaves(ast.root)), target) + for (l, t) in zip(collect(Leaves(ast.root)), target) @test kind(l) == t end end @@ -148,53 +151,85 @@ end @test kind(last(children(anchor2))) == K"URLLocation" end -anchor_tests = [(input = "[heading 1 anchor]\n\n[heading 1 anchor]{* Heading 1}", -target = K"DetachedModifierLocation") - (input = "[heading 2 anchor]\n\n[heading 2 anchor]{** Heading 2}", - target = K"DetachedModifierLocation") - (input = "[heading 3 anchor]\n\n[heading 3 anchor]{*** Heading 3}", - target = K"DetachedModifierLocation") - (input = "[heading 4 anchor]\n\n[heading 4 anchor]{**** Heading 4}", - target = K"DetachedModifierLocation") - (input = "[heading 5 anchor]\n\n[heading 5 anchor]{***** Heading 5}", - target = K"DetachedModifierLocation") - (input = "[heading 6 anchor]\n\n[heading 6 anchor]{****** Heading 6}", - target = K"DetachedModifierLocation") - (input = "[heading 7 anchor]\n\n[heading 7 anchor]{******* Heading 7}", - target = K"DetachedModifierLocation") - (input = "[generic anchor]\n\n[generic anchor]{# Generic}", - target = K"MagicLocation") - (input = "[norg file anchor]\n\n[norg file anchor]{:norg_file:}", - target = K"NorgFileLocation") - (input = "[external heading 1 anchor]\n\n[external heading 1 anchor]{:norg_file:* Heading 1}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external heading 2 anchor]\n\n[external heading 2 anchor]{:norg_file:** Heading 2}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external heading 3 anchor]\n\n[external heading 3 anchor]{:norg_file:*** Heading 3}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external heading 4 anchor]\n\n[external heading 4 anchor]{:norg_file:**** Heading 4}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external heading 5 anchor]\n\n[external heading 5 anchor]{:norg_file:***** Heading 5}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external heading 7 anchor]\n\n[external heading 6 anchor]{:norg_file:****** Heading 6}", - target = K"NorgFileLocation", - subtarget = K"DetachedModifierLocation") - (input = "[external generic anchor]\n\n[external generic anchor]{:norg_file:# Generic}", - target = K"NorgFileLocation", - subtarget = K"MagicLocation") - (input = "[non-norg file anchor]\n\n[non-norg file anchor]{/ external_file.txt}", - target = K"FileLocation") - (input = "[url anchor]\n\n[url anchor]{https://github.com/}", - target = K"URLLocation") - (input = "[file anchor]\n\n[file anchor]{file:///dev/null}", - target = K"URLLocation") - (input = 
"[timestamp anchor]\n\n[timestamp anchor]{@ Wednesday}", - target = K"TimestampLocation") +anchor_tests = [ + ( + input="[heading 1 anchor]\n\n[heading 1 anchor]{* Heading 1}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 2 anchor]\n\n[heading 2 anchor]{** Heading 2}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 3 anchor]\n\n[heading 3 anchor]{*** Heading 3}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 4 anchor]\n\n[heading 4 anchor]{**** Heading 4}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 5 anchor]\n\n[heading 5 anchor]{***** Heading 5}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 6 anchor]\n\n[heading 6 anchor]{****** Heading 6}", + target=K"DetachedModifierLocation", + ) + ( + input="[heading 7 anchor]\n\n[heading 7 anchor]{******* Heading 7}", + target=K"DetachedModifierLocation", + ) + (input="[generic anchor]\n\n[generic anchor]{# Generic}", target=K"MagicLocation") + ( + input="[norg file anchor]\n\n[norg file anchor]{:norg_file:}", + target=K"NorgFileLocation", + ) + ( + input="[external heading 1 anchor]\n\n[external heading 1 anchor]{:norg_file:* Heading 1}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external heading 2 anchor]\n\n[external heading 2 anchor]{:norg_file:** Heading 2}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external heading 3 anchor]\n\n[external heading 3 anchor]{:norg_file:*** Heading 3}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external heading 4 anchor]\n\n[external heading 4 anchor]{:norg_file:**** Heading 4}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external heading 5 anchor]\n\n[external heading 5 anchor]{:norg_file:***** Heading 5}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external heading 7 anchor]\n\n[external heading 6 anchor]{:norg_file:****** Heading 6}", + target=K"NorgFileLocation", + subtarget=K"DetachedModifierLocation", + ) + ( + input="[external generic anchor]\n\n[external generic anchor]{:norg_file:# Generic}", + target=K"NorgFileLocation", + subtarget=K"MagicLocation", + ) + ( + input="[non-norg file anchor]\n\n[non-norg file anchor]{/ external_file.txt}", + target=K"FileLocation", + ) + (input="[url anchor]\n\n[url anchor]{https://github.com/}", target=K"URLLocation") + (input="[file anchor]\n\n[file anchor]{file:///dev/null}", target=K"URLLocation") + ( + input="[timestamp anchor]\n\n[timestamp anchor]{@ Wednesday}", + target=K"TimestampLocation", + ) ] @testset "Testing anchor : $(t.target)" for t in anchor_tests @@ -223,109 +258,106 @@ end end @testset "Endlines in linkables." 
begin + @testset "Invalid endlines for target $(repr(k))" for (k, _) in simple_link_tests + invalid_singles = [ + """this is not a { + $k} + """ + """nor is this a [linkable + ] + """ + """{ + $k}""" + """{ + $k + }""" + """{$k + }""" + """{ $k}""" + ] + @testset "Invalid examples : $(repr(s))" for s in invalid_singles + ast = norg(s) + @test !any(kind(n) == K"Link" for n in collect(PreOrderDFS(ast))) + end + invalid_complexes = [ + """{$k}[invalid + ]""" + """[invalide]{$k + }""" + """{$k}[ + text + ]""" + """{$k}[text + ]""" + """{$k}[ + text]""" + ] + @testset "Invalid examples : $(repr(s))" for s in invalid_complexes + ast = norg(s) + p = first(children(ast.root)) + ps1, ps2 = children(p) + @test kind(ps1) == K"ParagraphSegment" + @test kind(ps2) == K"ParagraphSegment" + l, ws... = children(ps1) + @test kind(l) == K"Link" || kind(l) == K"Anchor" + @test all(kind.(ws) .== Ref(K"WordNode")) + end -@testset "Invalid endlines for target $(repr(k))" for (k,_) in simple_link_tests - invalid_singles = [ - """this is not a { - $k} - """ - """nor is this a [linkable - ] - """ - """{ - $k}""" - """{ - $k - }""" - """{$k - }""" - """{ $k}""" - ] -@testset "Invalid examples : $(repr(s))" for s in invalid_singles - ast = norg(s) - @test !any(kind(n) == K"Link" for n in collect(PreOrderDFS(ast))) -end - invalid_complexes = [ - """{$k}[invalid - ]""" - """[invalide]{$k - }""" - """{$k}[ - text - ]""" - """{$k}[text - ]""" - """{$k}[ - text]""" - ] -@testset "Invalid examples : $(repr(s))" for s in invalid_complexes - ast = norg(s) - p = first(children(ast.root)) - ps1, ps2 = children(p) - @test kind(ps1) == K"ParagraphSegment" - @test kind(ps2) == K"ParagraphSegment" - l, ws... = children(ps1) - @test kind(l) == K"Link" || kind(l) == K"Anchor" - @test all(kind.(ws) .== Ref(K"WordNode")) -end + invalid_inlines = [ + """< + hi>""" + """""" + ] + @testset "Invalid examples : $(repr(s))" for s in invalid_inlines + ast = norg(s) + @test !any(kind(n) == K"InlineLinkTarget" for n in collect(PreOrderDFS(ast))) + end + end - invalid_inlines = [ - """< - hi>""" - """""" - ] -@testset "Invalid examples : $(repr(s))" for s in invalid_inlines - ast = norg(s) - @test !any(kind(n) == K"InlineLinkTarget" for n in collect(PreOrderDFS(ast))) -end -end + @testset "Valid endlines" begin + valid_singles = [ + "{* some\ntext }" + "{# link\n text}" + "{* a link\nto a heading}" + ] + @testset "Valid examples : $(repr(s))" for s in valid_singles + ast = norg(s) + p = first(children(ast.root)) + ps = first(children(p)) + @test kind(first(children(ps))) == K"Link" + end + valid_complexes = [ + "{/ ~\n myfile.txt}[the `~` character is /not/ treated as a trailing modifier]" + "{* a\n link to a heading}[with\n a description]" + ] + @testset "Valid examples : $(repr(s))" for s in valid_complexes + ast = norg(s) + p = first(children(ast.root)) + ps = first(children(p)) + l = first(children(ps)) + @test kind(l) == K"Link" + loc, descr = children(l) + @test AST.is_link_location(loc) + @test kind(descr) == K"LinkDescription" + end + s = "[te\n xt]{# linkable}" + ast = norg(s) + p = first(children(ast.root)) + ps = first(children(p)) + a = first(children(ps)) + @test kind(a) == K"Anchor" + descr, loc = children(a) + @test AST.is_link_location(loc) + @test kind(descr) == K"LinkDescription" -@testset "Valid endlines" begin - valid_singles = [ - "{* some\ntext }" - "{# link\n text}" - "{* a link\nto a heading}" - ] -@testset "Valid examples : $(repr(s))" for s in valid_singles - ast = norg(s) - p = first(children(ast.root)) - ps = 
first(children(p)) - @test kind(first(children(ps))) == K"Link" -end - valid_complexes = [ - "{/ ~\n myfile.txt}[the `~` character is /not/ treated as a trailing modifier]" - "{* a\n link to a heading}[with\n a description]" - ] -@testset "Valid examples : $(repr(s))" for s in valid_complexes - ast = norg(s) - p = first(children(ast.root)) - ps = first(children(p)) - l = first(children(ps)) - @test kind(l) == K"Link" - loc, descr = children(l) - @test AST.is_link_location(loc) - @test kind(descr) == K"LinkDescription" -end - s = "[te\n xt]{# linkable}" - ast = norg(s) - p = first(children(ast.root)) - ps = first(children(p)) - a = first(children(ps)) - @test kind(a) == K"Anchor" - descr,loc = children(a) - @test AST.is_link_location(loc) - @test kind(descr) == K"LinkDescription" - - valid_inlines = [ - """""" - ] -@testset "Valid examples : $(repr(s))" for s in valid_inlines - ast = norg(s) - p = first(children(ast.root)) - ps = first(children(p)) - @test kind(first(children(ps))) == K"InlineLinkTarget" -end -end + valid_inlines = [""""""] + @testset "Valid examples : $(repr(s))" for s in valid_inlines + ast = norg(s) + p = first(children(ast.root)) + ps = first(children(p)) + @test kind(first(children(ps))) == K"InlineLinkTarget" + end + end end diff --git a/test/ast_tests/test_markup.jl b/test/ast_tests/test_markup.jl index 1e88890..f06d724 100644 --- a/test/ast_tests/test_markup.jl +++ b/test/ast_tests/test_markup.jl @@ -5,20 +5,20 @@ AST = Norg.AST textify = Norg.Codegen.textify simple_markups = [ -("*", K"Bold"), -("/", K"Italic") , -("_", K"Underline"), -("-", K"Strikethrough"), -("!", K"Spoiler"), -("^", K"Superscript"), -(",", K"Subscript"), -("`", K"InlineCode"), -("%", K"NullModifier"), -("\$", K"InlineMath"), -("&", K"Variable") + ("*", K"Bold"), + ("/", K"Italic"), + ("_", K"Underline"), + ("-", K"Strikethrough"), + ("!", K"Spoiler"), + ("^", K"Superscript"), + (",", K"Subscript"), + ("`", K"InlineCode"), + ("%", K"NullModifier"), + ("\$", K"InlineMath"), + ("&", K"Variable"), ] -@testset "Standalone markup for $m" for (m,k) in simple_markups +@testset "Standalone markup for $m" for (m, k) in simple_markups ast = norg("$(m)inner$(m)") @test ast isa Norg.AST.NorgDocument p = first(children(ast.root)) @@ -31,7 +31,7 @@ simple_markups = [ @test kind(ps) == K"ParagraphSegment" w = first(children(ps)) @test kind(w) == K"WordNode" - @test join(Norg.Tokens.value.(ast.tokens[w.start:w.stop])) == "inner" + @test join(Norg.Tokens.value.(ast.tokens[(w.start):(w.stop)])) == "inner" end @testset "Markup inside a sentence for $m" for (m, k) in simple_markups @@ -47,7 +47,7 @@ end @test kind(ps) == K"ParagraphSegment" w = first(children(ps)) @test kind(w) == K"WordNode" - @test join(Norg.Tokens.value.(ast.tokens[w.start:w.stop])) == "inner" + @test join(Norg.Tokens.value.(ast.tokens[(w.start):(w.stop)])) == "inner" end simple_nested_outer = [ @@ -62,7 +62,8 @@ simple_nested_outer = [ ] @testset "Nested markup $n inside $m" for (m, T) in simple_nested_outer, - (n, U) in simple_markups + (n, U) in simple_markups + if m == n continue end @@ -78,16 +79,12 @@ simple_nested_outer = [ @test kind(ps) == K"ParagraphSegment" w = first(children(ps)) @test kind(w) == K"WordNode" - @test join(Norg.Tokens.value.(ast.tokens[w.start:w.stop])) == "inner" + @test join(Norg.Tokens.value.(ast.tokens[(w.start):(w.stop)])) == "inner" end -verbatim_nested = [ - ("`", K"InlineCode"), - ("\$", K"InlineMath"), - ("&", K"Variable") -] +verbatim_nested = [("`", K"InlineCode"), ("\$", K"InlineMath"), ("&", K"Variable")] 
-@testset "Verbatim markup nesting test: $V" for (v,V) in verbatim_nested +@testset "Verbatim markup nesting test: $V" for (v, V) in verbatim_nested @testset "Nested markup $T inside $V" for (m, T) in simple_markups if occursin(m, "`\$&") continue @@ -144,7 +141,7 @@ end s = "*/Bold and italic*/" ast = norg(s) ps = first(children(first(children(ast.root)))) - b,w = children(ps) + b, w = children(ps) @test kind(b) == K"Bold" @test kind(w) == K"WordNode" ps = first(children(b)) @@ -196,10 +193,10 @@ end @test kind(ic2) == K"InlineCode" end -@testset "Link modifier for: $T" for (m,T) in simple_markups +@testset "Link modifier for: $T" for (m, T) in simple_markups ast = norg("Intra:$(m)word$(m):markup") ps = first(children(first(children(ast.root)))) - w1,mark,w2 = children(ps) + w1, mark, w2 = children(ps) @test kind(w1) == K"WordNode" @test kind(mark) == T @test kind(w2) == K"WordNode" @@ -208,19 +205,18 @@ end @test textify(ast, w2) == "markup" end - simple_freeformmarkups = [ -("*", K"FreeFormBold"), -("/", K"FreeFormItalic") , -("_", K"FreeFormUnderline"), -("-", K"FreeFormStrikethrough"), -("!", K"FreeFormSpoiler"), -("^", K"FreeFormSuperscript"), -(",", K"FreeFormSubscript"), -("`", K"FreeFormInlineCode"), -("%", K"FreeFormNullModifier"), -("\$", K"FreeFormInlineMath"), -("&", K"FreeFormVariable") + ("*", K"FreeFormBold"), + ("/", K"FreeFormItalic"), + ("_", K"FreeFormUnderline"), + ("-", K"FreeFormStrikethrough"), + ("!", K"FreeFormSpoiler"), + ("^", K"FreeFormSuperscript"), + (",", K"FreeFormSubscript"), + ("`", K"FreeFormInlineCode"), + ("%", K"FreeFormNullModifier"), + ("\$", K"FreeFormInlineMath"), + ("&", K"FreeFormVariable"), ] freeform_templates = [ @@ -230,7 +226,7 @@ freeform_templates = [ " inner " ] -@testset "Standalone markup for $k" for (m,k) in simple_freeformmarkups +@testset "Standalone markup for $k" for (m, k) in simple_freeformmarkups for s in freeform_templates ast = norg("$(m)|$s|$(m)") @test ast isa Norg.AST.NorgDocument @@ -248,12 +244,10 @@ freeform_templates = [ end verbatim_nested = [ - ("`", K"FreeFormInlineCode"), - ("\$", K"FreeFormInlineMath"), - ("&", K"FreeFormVariable") + ("`", K"FreeFormInlineCode"), ("\$", K"FreeFormInlineMath"), ("&", K"FreeFormVariable") ] -@testset "Verbatim markup nesting test: $V" for (v,V) in verbatim_nested +@testset "Verbatim markup nesting test: $V" for (v, V) in verbatim_nested @testset "Nested markup $T inside $V" for (m, T) in simple_markups if occursin(m, "`\$&") continue diff --git a/test/ast_tests/test_nestable_detached_modifiers.jl b/test/ast_tests/test_nestable_detached_modifiers.jl index 476f8e6..f01c9c7 100644 --- a/test/ast_tests/test_nestable_detached_modifiers.jl +++ b/test/ast_tests/test_nestable_detached_modifiers.jl @@ -2,9 +2,11 @@ Node = Norg.AST.Node AST = Norg.AST -nestable = [('-', K"UnorderedList1") - ('~', K"OrderedList1") - ('>', K"Quote1")] +nestable = [ + ('-', K"UnorderedList1") + ('~', K"OrderedList1") + ('>', K"Quote1") +] @testset "$T should be grouping." for (m, T) in nestable s = """$m first item @@ -18,7 +20,8 @@ nestable = [('-', K"UnorderedList1") @test kind(item2) == K"NestableItem" end -@testset "$T grouping should not happen when there is a paragraph break." for (m, T) in nestable +@testset "$T grouping should not happen when there is a paragraph break." 
for (m, T) in + nestable s = """$m first item $m second item @@ -33,9 +36,11 @@ end @test kind(item2) == K"NestableItem" end -nestable_check = [('-', AST.is_unordered_list) - ('~', AST.is_ordered_list) - ('>', AST.is_quote)] +nestable_check = [ + ('-', AST.is_unordered_list) + ('~', AST.is_ordered_list) + ('>', AST.is_quote) +] @testset "$m should be nestable." for (m, verif) in nestable_check s = """$m item1 diff --git a/test/ast_tests/test_paragraphs.jl b/test/ast_tests/test_paragraphs.jl index 1e1b1a2..eae82b9 100644 --- a/test/ast_tests/test_paragraphs.jl +++ b/test/ast_tests/test_paragraphs.jl @@ -1,8 +1,7 @@ Node = Norg.AST.Node @testset "Two newlines should separate two paragraphs." begin - ast = norg( - "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !") + ast = norg("Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !") p1, p2 = children(ast.root) @test kind(p1) == K"Paragraph" @test kind(p2) == K"Paragraph" @@ -10,9 +9,10 @@ end @testset "One newline should separate two paragraph segments." begin ast = norg( - "Hi I am first paragraph segment...\nAnd I am second paragraph segment !\n\nOh, hello there, I am second paragraph !") + "Hi I am first paragraph segment...\nAnd I am second paragraph segment !\n\nOh, hello there, I am second paragraph !", + ) p1, p2 = children(ast.root) - ps1,ps2 = children(p1) + ps1, ps2 = children(p1) ps3 = first(children(p2)) @test kind(p1) == K"Paragraph" @test kind(p2) == K"Paragraph" diff --git a/test/ast_tests/test_rangeable_detached_modifiers.jl b/test/ast_tests/test_rangeable_detached_modifiers.jl index 4eee4ec..82fe86d 100644 --- a/test/ast_tests/test_rangeable_detached_modifiers.jl +++ b/test/ast_tests/test_rangeable_detached_modifiers.jl @@ -14,13 +14,13 @@ rangeable = [ outside """ ast = norg(s) - rang,p = children(ast.root) + rang, p = children(ast.root) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content = children(item) + title, content = children(item) @test kind(title) == K"ParagraphSegment" @test kind(content) == K"Paragraph" @test textify(ast, title) == "title" @@ -36,13 +36,13 @@ end outside """ ast = norg(s) - rang,p = children(ast.root) + rang, p = children(ast.root) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content1,content2 = children(item) + title, content1, content2 = children(item) @test kind(title) == K"ParagraphSegment" @test kind(content1) == K"Paragraph" @test kind(content2) == K"Paragraph" @@ -60,13 +60,13 @@ end """ ast = norg(s) t = first(children(ast.root)) - _,rang,p = children(t) + _, rang, p = children(t) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content = children(item) + title, content = children(item) @test kind(title) == K"ParagraphSegment" @test kind(content) == K"Paragraph" @test textify(ast, title) == "title" @@ -84,13 +84,13 @@ end """ ast = norg(s) t = first(children(ast.root)) - _,rang,p = children(t) + _, rang, p = children(t) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content1,content2 = children(item) + title, content1, content2 = children(item) @test kind(title) == K"ParagraphSegment" @test 
kind(content1) == K"Paragraph" @test kind(content2) == K"Paragraph" @@ -106,13 +106,13 @@ end outside """ ast = norg(s) - rang,p = children(ast.root) + rang, p = children(ast.root) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content = children(item) + title, content = children(item) @test kind(title) == K"ParagraphSegment" @test kind(content) == K"Paragraph" @test textify(ast, title) == "title" @@ -128,13 +128,13 @@ end outside """ ast = norg(s) - rang,p = children(ast.root) + rang, p = children(ast.root) @test kind(rang) == T @test kind(p) == K"Paragraph" @test textify(ast, p) == "outside" item = first(children(rang)) @test kind(item) == K"RangeableItem" - title,content1,content2 = children(item) + title, content1, content2 = children(item) @test kind(title) == K"ParagraphSegment" @test kind(content1) == K"Paragraph" @test kind(content2) == K"Paragraph" @@ -144,27 +144,28 @@ end end @testset "Rangeables must be grouping: $T" for (m, T) in rangeable - make_str(str_kind, label) = if str_kind=="simple" - """$m title$(label) - content$(label) - """ - else - """$m$m title$(label) - content$(label) - $m$m - """ - end + make_str(str_kind, label) = + if str_kind == "simple" + """$m title$(label) + content$(label) + """ + else + """$m$m title$(label) + content$(label) + $m$m + """ + end for a in ("simple", "matched") s_a = make_str(a, "a") for b in ("simple", "matched") s_b = make_str(b, "b") for c in ("simple", "matched") s_c = make_str(c, "c") - s = s_a*s_b*s_c + s = s_a * s_b * s_c ast = norg(s) rang = first(children(ast.root)) @test kind(rang) == T - for (l,item) in zip(["a", "b", "c"], children(rang)) + for (l, item) in zip(["a", "b", "c"], children(rang)) @test kind(item) === K"RangeableItem" title, content = children(item) @test textify(ast, title) == "title$l" diff --git a/test/ast_tests/test_tags.jl b/test/ast_tests/test_tags.jl index 3ec9af5..1d7cd3d 100644 --- a/test/ast_tests/test_tags.jl +++ b/test/ast_tests/test_tags.jl @@ -139,38 +139,34 @@ tagtypes = [ end @testset "Weak carryover tag applies to the right elements." begin -@testset "Paragraghs and paragraph segments." begin - ast = norg""" - +test - Applied here. - Not applied here. - """ - p = first(children(ast.root)) - t,ps = children(p) - @test kind(t) == K"WeakCarryoverTag" - @test kind(ps) == K"ParagraphSegment" - label, ps = children(t) - @test Norg.Codegen.textify(ast, label) == "test" - @test Norg.Codegen.textify(ast, ps) == "Applied here." - ast = norg""" - Not applied here. - +test - Applied here. - """ - p = first(children(ast.root)) - ps,t = children(p) - @test kind(t) == K"WeakCarryoverTag" - @test kind(ps) == K"ParagraphSegment" - label, ps = children(t) - @test Norg.Codegen.textify(ast, label) == "test" - @test Norg.Codegen.textify(ast, ps) == "Applied here." -end -nestables = [ - ("-", K"UnorderedList1"), - ("~", K"OrderedList1"), - (">", K"Quote1") -] -@testset "Nestable modifiers: $m" for (t,m) in nestables + @testset "Paragraghs and paragraph segments." begin + ast = norg""" + +test + Applied here. + Not applied here. + """ + p = first(children(ast.root)) + t, ps = children(p) + @test kind(t) == K"WeakCarryoverTag" + @test kind(ps) == K"ParagraphSegment" + label, ps = children(t) + @test Norg.Codegen.textify(ast, label) == "test" + @test Norg.Codegen.textify(ast, ps) == "Applied here." + ast = norg""" + Not applied here. + +test + Applied here. 
+ """ + p = first(children(ast.root)) + ps, t = children(p) + @test kind(t) == K"WeakCarryoverTag" + @test kind(ps) == K"ParagraphSegment" + label, ps = children(t) + @test Norg.Codegen.textify(ast, label) == "test" + @test Norg.Codegen.textify(ast, ps) == "Applied here." + end + nestables = [("-", K"UnorderedList1"), ("~", K"OrderedList1"), (">", K"Quote1")] + @testset "Nestable modifiers: $m" for (t, m) in nestables s = """ +test $t applied @@ -178,7 +174,7 @@ nestables = [ """ ast = norg(s) nestable = first(children(ast.root)) - tag,item = children(nestable) + tag, item = children(nestable) @test kind(tag) == K"WeakCarryoverTag" @test kind(item) == K"NestableItem" @test Norg.Codegen.textify(ast, item) == "not applied" @@ -192,69 +188,71 @@ nestables = [ """ ast = norg(s) nestable = first(children(ast.root)) - item,tag = children(nestable) + item, tag = children(nestable) @test kind(tag) == K"WeakCarryoverTag" @test kind(item) == K"NestableItem" @test Norg.Codegen.textify(ast, item) == "not applied" label, item = children(tag) @test Norg.Codegen.textify(ast, label) == "test" @test Norg.Codegen.textify(ast, item) == "applied" -end -various = [ - (""" - +test - * Heading - hi there - """, K"Heading1") - (""" - +test - @test - blip - @end - """, K"Verbatim") -] -@testset "Various child kind: $k" for (s,k) in various - ast = norg(s) - tag = first(children(ast.root)) - @test kind(tag) == K"WeakCarryoverTag" - label,child = children(tag) - @test Norg.Codegen.textify(ast, label) == "test" - @test kind(child) == k -end + end + various = [ + ( + """ + +test + * Heading + hi there + """, + K"Heading1", + ) + ( + """ + +test + @test + blip + @end + """, + K"Verbatim", + ) + ] + @testset "Various child kind: $k" for (s, k) in various + ast = norg(s) + tag = first(children(ast.root)) + @test kind(tag) == K"WeakCarryoverTag" + label, child = children(tag) + @test Norg.Codegen.textify(ast, label) == "test" + @test kind(child) == k + end end @testset "Strong carryover tag applies to the right elements." begin -@testset "Paragraghs and paragraph segments." begin - ast = norg""" - #test - Applied here. - Applied here too. - """ - t = first(children(ast.root)) - @test length(children(t)) == 2 - label,p = children(t) - @test kind(t) == K"StrongCarryoverTag" - @test kind(p) == K"Paragraph" - @test Norg.Codegen.textify(ast, label) == "test" - ast = norg""" - Not applied here. - #test - Applied here. - """ - p1,t = children(ast.root) - @test kind(t) == K"StrongCarryoverTag" - @test kind(p1) == K"Paragraph" - label, p2 = children(t) - @test kind(p2) == K"Paragraph" - @test Norg.Codegen.textify(ast, label) == "test" - @test Norg.Codegen.textify(ast, p2) == "Applied here." -end -nestables = [ - ("-", K"UnorderedList1"), - ("~", K"OrderedList1"), - (">", K"Quote1") -] -@testset "Nestable modifiers: $m" for (t,m) in nestables + @testset "Paragraghs and paragraph segments." begin + ast = norg""" + #test + Applied here. + Applied here too. + """ + t = first(children(ast.root)) + @test length(children(t)) == 2 + label, p = children(t) + @test kind(t) == K"StrongCarryoverTag" + @test kind(p) == K"Paragraph" + @test Norg.Codegen.textify(ast, label) == "test" + ast = norg""" + Not applied here. + #test + Applied here. + """ + p1, t = children(ast.root) + @test kind(t) == K"StrongCarryoverTag" + @test kind(p1) == K"Paragraph" + label, p2 = children(t) + @test kind(p2) == K"Paragraph" + @test Norg.Codegen.textify(ast, label) == "test" + @test Norg.Codegen.textify(ast, p2) == "Applied here." 
+ end + nestables = [("-", K"UnorderedList1"), ("~", K"OrderedList1"), (">", K"Quote1")] + @testset "Nestable modifiers: $m" for (t, m) in nestables s = """ #test $t applied @@ -278,7 +276,7 @@ nestables = [ $t applied """ ast = norg(s) - nestable,tag = children(ast.root) + nestable, tag = children(ast.root) @test kind(tag) == K"StrongCarryoverTag" @test kind(nestable) == m @test kind(first(children(nestable))) == K"NestableItem" @@ -288,28 +286,34 @@ nestables = [ @test kind(nestable) == m @test kind(first(children(nestable))) == K"NestableItem" @test Norg.Codegen.textify(ast, nestable) == "applied" -end -various = [ - (""" - #test - * Heading - hi there - """, K"Heading1") - (""" - #test - @test - blip - @end - """, K"Verbatim") -] -@testset "Various child kind: $k" for (s,k) in various - ast = norg(s) - tag = first(children(ast.root)) - @test kind(tag) == K"StrongCarryoverTag" - label,child = children(tag) - @test Norg.Codegen.textify(ast, label) == "test" - @test kind(child) == k -end + end + various = [ + ( + """ + #test + * Heading + hi there + """, + K"Heading1", + ) + ( + """ + #test + @test + blip + @end + """, + K"Verbatim", + ) + ] + @testset "Various child kind: $k" for (s, k) in various + ast = norg(s) + tag = first(children(ast.root)) + @test kind(tag) == K"StrongCarryoverTag" + label, child = children(tag) + @test Norg.Codegen.textify(ast, label) == "test" + @test kind(child) == k + end end standard_children = [ diff --git a/test/code_analysis_tests/test_aqua.jl b/test/code_analysis_tests/test_aqua.jl new file mode 100644 index 0000000..dd2a0fe --- /dev/null +++ b/test/code_analysis_tests/test_aqua.jl @@ -0,0 +1,7 @@ +using Aqua + +@static if VERSION < v"1.9" + Aqua.test_all(Norg, ambiguities=false) +else + Aqua.test_all(Norg) +end diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index ba35744..4d6940f 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -1,26 +1,27 @@ +@static if VERSION ≥ v"1.9" using JET, AbstractTrees @testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin + payload = open(Norg.NORG_SPEC_PATH, "r") do f + read(f, String) + end -payload = open(Norg.NORG_SPEC_PATH, "r") do f - read(f, String) -end - -# Error analysis + # Error analysis -# Parse the entire spec -@test_call ignored_modules=(AbstractTrees, Base) norg(payload) -ast = norg(payload) -# HTML codegen -@test_call ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) -# JSON codegen -@test_call ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) + # Parse the entire spec + @test_call ignored_modules = (AbstractTrees, Base) norg(payload) + ast = norg(payload) + # HTML codegen + @test_call ignored_modules = (AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) + # JSON codegen + @test_call ignored_modules = (AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) -# Optimization analysis -# Parsing -@test_opt ignored_modules=(AbstractTrees, Base) norg(payload) -# Codegen -@test_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) -@test_opt broken=true ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast) + # Optimization analysis + # Parsing + @test_opt ignored_modules = (AbstractTrees, Base) norg(payload) + # Codegen + @test_opt ignored_modules = (AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast) + @test_opt broken = true ignored_modules = (AbstractTrees, Base) Norg.codegen( + 
JSONTarget(), ast + ) +end end - - diff --git a/test/codegen_tests/html.jl b/test/codegen_tests/html.jl index d3e569a..a36ca69 100644 --- a/test/codegen_tests/html.jl +++ b/test/codegen_tests/html.jl @@ -1,170 +1,171 @@ @testset "HTML target" begin -using Gumbo - -@testset "Test paragraphs" begin - s = "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !" - html = norg(HTMLTarget(), s) |> string |> parsehtml - pars = html.root[2][1] - @test tag(pars[1]) == :p - @test tag(pars[2]) == :p -end + using Gumbo + + @testset "Test paragraphs" begin + s = "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !" + html = parsehtml(string(norg(HTMLTarget(), s))) + pars = html.root[2][1] + @test tag(pars[1]) == :p + @test tag(pars[2]) == :p + end -simple_markups_nodes = [ - ('*', :b), - ('/', :i), - ('_', :ins), - ('-', :del), - ('!', :span), - ('^', :sup), - (',', :sub), - ('`', :code), -] - -simple_markups_class = [ - ('*', nothing), - ('/', nothing), - ('_', nothing), - ('-', nothing), - ('!', "spoiler"), - ('^', nothing), - (',', nothing), - ('`', nothing), -] - -@testset "Test correct markup for $m" for (m, html_node) in simple_markups_nodes - s = "$(m)inner$(m)" - html = norg(HTMLTarget(), s) |> string |> parsehtml - b = html.root[2][1][1][1] - @test tag(b) == html_node -end + simple_markups_nodes = [ + ('*', :b), + ('/', :i), + ('_', :ins), + ('-', :del), + ('!', :span), + ('^', :sup), + (',', :sub), + ('`', :code), + ] + + simple_markups_class = [ + ('*', nothing), + ('/', nothing), + ('_', nothing), + ('-', nothing), + ('!', "spoiler"), + ('^', nothing), + (',', nothing), + ('`', nothing), + ] + + @testset "Test correct markup for $m" for (m, html_node) in simple_markups_nodes + s = "$(m)inner$(m)" + html = parsehtml(string(norg(HTMLTarget(), s))) + b = html.root[2][1][1][1] + @test tag(b) == html_node + end -@testset "Test correct class for $m" for (m, html_class) in simple_markups_class - s = "$(m)inner$(m)" - html = norg(HTMLTarget(), s) |> string |> parsehtml - b = html.root[2][1][1][1] - if isnothing(html_class) - @test !haskey(attrs(b), "class") - else - @test haskey(attrs(b), "class") - @test getattr(b, "class") == html_class + @testset "Test correct class for $m" for (m, html_class) in simple_markups_class + s = "$(m)inner$(m)" + html = parsehtml(string(norg(HTMLTarget(), s))) + b = html.root[2][1][1][1] + if isnothing(html_class) + @test !haskey(attrs(b), "class") + else + @test haskey(attrs(b), "class") + @test getattr(b, "class") == html_class + end end -end -simple_link_tests = [ -(":norg_file:", "norg_file", "norg_file") -("* heading", "#h1-heading", "heading") -("** heading", "#h2-heading", "heading") -("*** heading", "#h3-heading", "heading") -("**** heading", "#h4-heading", "heading") -("***** heading", "#h5-heading", "heading") -("****** heading", "#h6-heading", "heading") -("******* heading", "#h6-heading", "heading") -("# magic", "", "magic") -("42", "#l-42", "#l-42") -("https://example.org", "https://example.org", "https://example.org") -("file://example.txt", "file://example.txt", "file://example.txt") -("/ example.txt", "example.txt", "example.txt") -("? 
test", "/test", "test") -] - -@testset "Test links: $link" for (link, target, text) in simple_link_tests - s = "{$link}" - html = norg(HTMLTarget(), s) |> string |> parsehtml - link = html.root[2][1][1][1] - @test tag(link) == :a - @test getattr(link, "href") == target - @test string(link[1]) == text -end + simple_link_tests = [ + (":norg_file:", "norg_file", "norg_file") + ("* heading", "#h1-heading", "heading") + ("** heading", "#h2-heading", "heading") + ("*** heading", "#h3-heading", "heading") + ("**** heading", "#h4-heading", "heading") + ("***** heading", "#h5-heading", "heading") + ("****** heading", "#h6-heading", "heading") + ("******* heading", "#h6-heading", "heading") + ("# magic", "", "magic") + ("42", "#l-42", "#l-42") + ("https://example.org", "https://example.org", "https://example.org") + ("file://example.txt", "file://example.txt", "file://example.txt") + ("/ example.txt", "example.txt", "example.txt") + ("? test", "/test", "test") + ] + + @testset "Test links: $link" for (link, target, text) in simple_link_tests + s = "{$link}" + html = parsehtml(string(norg(HTMLTarget(), s))) + link = html.root[2][1][1][1] + @test tag(link) == :a + @test getattr(link, "href") == target + @test string(link[1]) == text + end -@testset "Test links with description: $link" for (link, target) in simple_link_tests - s = "{$link}[website]" - html = norg(HTMLTarget(), s) |> string |> parsehtml - link = html.root[2][1][1][1] - @test tag(link) == :a - @test getattr(link, "href") == target - @test string(link[1]) == "website" -end + @testset "Test links with description: $link" for (link, target) in simple_link_tests + s = "{$link}[website]" + html = parsehtml(string(norg(HTMLTarget(), s))) + link = html.root[2][1][1][1] + @test tag(link) == :a + @test getattr(link, "href") == target + @test string(link[1]) == "website" + end -@testset "Anchors with embedded definition: $link" for (link, target) in simple_link_tests - s = "[website]{$link}" - html = norg(HTMLTarget(), s) |> string |> parsehtml - link = html.root[2][1][1][1] - @test tag(link) == :a - @test getattr(link, "href") == target - @test string(link[1]) == "website" -end + @testset "Anchors with embedded definition: $link" for (link, target) in + simple_link_tests + s = "[website]{$link}" + html = parsehtml(string(norg(HTMLTarget(), s))) + link = html.root[2][1][1][1] + @test tag(link) == :a + @test getattr(link, "href") == target + @test string(link[1]) == "website" + end -@testset "Verbatim code" begin - s = """@code julia - using Norg, Hyperscript - s = "*Hi there*" - html = norg(HTMLTarget(), s) |> string |> parsehtml - @end - """ - html = norg(HTMLTarget(), s) |> string |> parsehtml - pre = html.root[2][1][1] - @test tag(pre) == :pre - code = pre[2] - @test tag(code) == :code - @test haskey(attrs(code), "class") - @test getattr(code, "class") == "language-julia" -end + @testset "Verbatim code" begin + s = """@code julia + using Norg, Hyperscript + s = "*Hi there*" + html = norg(HTMLTarget(), s) |> string |> parsehtml + @end + """ + html = parsehtml(string(norg(HTMLTarget(), s))) + pre = html.root[2][1][1] + @test tag(pre) == :pre + code = pre[2] + @test tag(code) == :code + @test haskey(attrs(code), "class") + @test getattr(code, "class") == "language-julia" + end -heading_levels = 1:6 - -@testset "Level $i heading" for i in heading_levels - s = """$(repeat("*", i)) heading - text - """ - html = norg(HTMLTarget(), s) |> string |> parsehtml - section = html.root[2][1][1] - @test tag(section) == :section - @test haskey(attrs(section), 
"id") - @test getattr(section, "id") == "section-h$(i)-heading" - h,p = children(section) - @test tag(h) == Symbol("h$i") - @test haskey(attrs(h), "id") - @test getattr(h, "id") == "h$(i)-heading" - @test text(first(children(h))) == "heading" - @test tag(p) == :p - @test text(first(children(p))) == "text" -end + heading_levels = 1:6 + + @testset "Level $i heading" for i in heading_levels + s = """$(repeat("*", i)) heading + text + """ + html = parsehtml(string(norg(HTMLTarget(), s))) + section = html.root[2][1][1] + @test tag(section) == :section + @test haskey(attrs(section), "id") + @test getattr(section, "id") == "section-h$(i)-heading" + h, p = children(section) + @test tag(h) == Symbol("h$i") + @test haskey(attrs(h), "id") + @test getattr(h, "id") == "h$(i)-heading" + @test text(first(children(h))) == "heading" + @test tag(p) == :p + @test text(first(children(p))) == "text" + end -nestable_lists = ['~'=>:ol, '-'=>:ul] -@testset "$target list" for (m, target) in nestable_lists - s = """$m Hello, salute sinchero oon kydooke - $m Shintero yuo been na - $m Na sinchere fedicheda - """ - html = norg(HTMLTarget(), s) |> string |> parsehtml - list = html.root[2][1][1] - @test tag(list) == target - lis = children(list) - @test all(tag.(lis) .== :li) -end + nestable_lists = ['~' => :ol, '-' => :ul] + @testset "$target list" for (m, target) in nestable_lists + s = """$m Hello, salute sinchero oon kydooke + $m Shintero yuo been na + $m Na sinchere fedicheda + """ + html = parsehtml(string(norg(HTMLTarget(), s))) + list = html.root[2][1][1] + @test tag(list) == target + lis = children(list) + @test all(tag.(lis) .== :li) + end -@testset "quote" begin - s = "> I QUOTE you" - html = norg(HTMLTarget(), s) |> string |> parsehtml - q = html.root[2][1][1] - @test tag(q) == :blockquote -end + @testset "quote" begin + s = "> I QUOTE you" + html = parsehtml(string(norg(HTMLTarget(), s))) + q = html.root[2][1][1] + @test tag(q) == :blockquote + end -@testset "inline link" begin - s = """""" - html = norg(HTMLTarget(), s) |> string |> parsehtml - p = html.root[2][1][1] - @test length(children(p)) == 1 - span = first(children(p)) - @test haskey(attrs(span), "id") - @test getattr(span, "id") == "inline-link-target" -end + @testset "inline link" begin + s = """""" + html = parsehtml(string(norg(HTMLTarget(), s))) + p = html.root[2][1][1] + @test length(children(p)) == 1 + span = first(children(p)) + @test haskey(attrs(span), "id") + @test getattr(span, "id") == "inline-link-target" + end -@testset "Parse the entier Norg spec without error." begin - s = open(Norg.NORG_SPEC_PATH, "r") do f - read(f, String) + @testset "Parse the entier Norg spec without error." 
begin + s = open(Norg.NORG_SPEC_PATH, "r") do f + read(f, String) + end + html = parsehtml(string(norg(HTMLTarget(), s))) + @test html isa HTMLDocument end - html = norg(HTMLTarget(), s) |> string |> parsehtml - @test html isa HTMLDocument -end end diff --git a/test/codegen_tests/json.jl b/test/codegen_tests/json.jl index f27a760..ef82ce4 100644 --- a/test/codegen_tests/json.jl +++ b/test/codegen_tests/json.jl @@ -1,200 +1,208 @@ @testset "JSON target" begin -using JSON -# generated JSON correctness is checked directly with pandoc -using pandoc_jll - -function pandoc_approval(json) - io_err = PipeBuffer() - try - pandoc() do pandoc_bin - io = PipeBuffer() - write(io, json) - run(pipeline(`$(pandoc_bin) -f json -t json`, stdin=io, stdout=devnull, stderr=io_err)) - end - catch e - err = String(take!(io_err)) - @error "Pandoc error" err - return false + using JSON + # generated JSON correctness is checked directly with pandoc + using pandoc_jll + + function pandoc_approval(json) + io_err = PipeBuffer() + try + pandoc() do pandoc_bin + io = PipeBuffer() + write(io, json) + run( + pipeline( + `$(pandoc_bin) -f json -t json`; + stdin=io, + stdout=devnull, + stderr=io_err, + ), + ) + end + catch e + err = String(take!(io_err)) + @error "Pandoc error" err + return false + end + return true end - true -end - -@testset "Test paragraphs" begin - s = "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - pars = json["blocks"] - @test pars[1]["t"] == "Para" - @test pars[2]["t"] == "Para" -end -simple_markups_nodes = [ - ('*', "Strong"), - ('/', "Emph"), - ('_', "Underline"), - ('-', "Strikeout"), - ('!', "Span"), - ('^', "Superscript"), - (',', "Subscript"), - ('`', "Code"), -] - -simple_markups_class = [ - ('*', nothing), - ('/', nothing), - ('_', nothing), - ('-', nothing), - ('!', "spoiler"), - ('^', nothing), - (',', nothing), - ('`', nothing), -] - -@testset "Test correct markup for $m" for (m, node) in simple_markups_nodes - s = "$(m)inner$(m)" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - b = json["blocks"][1]["c"][1] - @test b["t"] == node -end + @testset "Test paragraphs" begin + s = "Hi I am first paragraph.\n\nOh, hello there, I am second paragraph !" 
+ json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + pars = json["blocks"] + @test pars[1]["t"] == "Para" + @test pars[2]["t"] == "Para" + end -@testset "Test correct class for $m" for (m, class) in simple_markups_class - s = "$(m)inner$(m)" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - b = json["blocks"][1]["c"][1] - if !isnothing(class) - @test first(b["c"])[2][1] == class + simple_markups_nodes = [ + ('*', "Strong"), + ('/', "Emph"), + ('_', "Underline"), + ('-', "Strikeout"), + ('!', "Span"), + ('^', "Superscript"), + (',', "Subscript"), + ('`', "Code"), + ] + + simple_markups_class = [ + ('*', nothing), + ('/', nothing), + ('_', nothing), + ('-', nothing), + ('!', "spoiler"), + ('^', nothing), + (',', nothing), + ('`', nothing), + ] + + @testset "Test correct markup for $m" for (m, node) in simple_markups_nodes + s = "$(m)inner$(m)" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + b = json["blocks"][1]["c"][1] + @test b["t"] == node end -end -simple_link_tests = [ -(":norg_file:", "norg_file", "norg_file") -("* heading", "#h1-heading", "heading") -("** heading", "#h2-heading", "heading") -("*** heading", "#h3-heading", "heading") -("**** heading", "#h4-heading", "heading") -("***** heading", "#h5-heading", "heading") -("****** heading", "#h6-heading", "heading") -("******* heading", "#h6-heading", "heading") -("# magic", "", "magic") -("42", "#l-42", "#l-42") -("https://example.org", "https://example.org", "https://example.org") -("file://example.txt", "file://example.txt", "file://example.txt") -("/ example.txt", "example.txt", "example.txt") -("? test", "/test", "test") -] - -@testset "Test links: $link" for (link, target, text) in simple_link_tests - s = "{$link}" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - link = json["blocks"][1]["c"][1] - @test link["t"] == "Link" - @test link["c"][2][1]["t"] == "Str" - @test link["c"][2][1]["c"] == text - @test link["c"][3][1] == target -end + @testset "Test correct class for $m" for (m, class) in simple_markups_class + s = "$(m)inner$(m)" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + b = json["blocks"][1]["c"][1] + if !isnothing(class) + @test first(b["c"])[2][1] == class + end + end -@testset "Test links with description: $link" for (link, target) in simple_link_tests - s = "{$link}[website]" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - link = json["blocks"][1]["c"][1] - @test link["t"] == "Link" - @test link["c"][2][1]["t"] == "Str" - @test link["c"][2][1]["c"] == "website" - @test link["c"][3][1] == target -end + simple_link_tests = [ + (":norg_file:", "norg_file", "norg_file") + ("* heading", "#h1-heading", "heading") + ("** heading", "#h2-heading", "heading") + ("*** heading", "#h3-heading", "heading") + ("**** heading", "#h4-heading", "heading") + ("***** heading", "#h5-heading", "heading") + ("****** heading", "#h6-heading", "heading") + ("******* heading", "#h6-heading", "heading") + ("# magic", "", "magic") + ("42", "#l-42", "#l-42") + ("https://example.org", "https://example.org", "https://example.org") + ("file://example.txt", "file://example.txt", "file://example.txt") + ("/ example.txt", "example.txt", "example.txt") + ("? 
test", "/test", "test") + ] + + @testset "Test links: $link" for (link, target, text) in simple_link_tests + s = "{$link}" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + link = json["blocks"][1]["c"][1] + @test link["t"] == "Link" + @test link["c"][2][1]["t"] == "Str" + @test link["c"][2][1]["c"] == text + @test link["c"][3][1] == target + end -@testset "Anchors with embedded definition: $link" for (link, target) in simple_link_tests - s = "[website]{$link}" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - link = json["blocks"][1]["c"][1] - @test link["t"] == "Link" - @test link["c"][2][1]["t"] == "Str" - @test link["c"][2][1]["c"] == "website" - @test link["c"][3][1] == target -end + @testset "Test links with description: $link" for (link, target) in simple_link_tests + s = "{$link}[website]" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + link = json["blocks"][1]["c"][1] + @test link["t"] == "Link" + @test link["c"][2][1]["t"] == "Str" + @test link["c"][2][1]["c"] == "website" + @test link["c"][3][1] == target + end -@testset "Verbatim code" begin - s = """@code julia - using Norg - s = "*Hi there*" - json = norg(Norg.JSONTarget(), s) - @end - """ - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - cb = json["blocks"][1] - @test cb["t"] == "CodeBlock" - attr, content = cb["c"] - @test attr[2][1] == "julia" - @test content == """using Norg\ns = "*Hi there*"\njson = norg(Norg.JSONTarget(), s)\n""" -end + @testset "Anchors with embedded definition: $link" for (link, target) in + simple_link_tests + s = "[website]{$link}" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + link = json["blocks"][1]["c"][1] + @test link["t"] == "Link" + @test link["c"][2][1]["t"] == "Str" + @test link["c"][2][1]["c"] == "website" + @test link["c"][3][1] == target + end -heading_levels = 1:6 - -@testset "Level $i heading" for i in heading_levels - s = """$(repeat("*", i)) heading - text - """ - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - container = json["blocks"][1] - @test container["t"] == "Div" - attr, content = container["c"] - @test first(attr) == "section-h$i-heading" - heading = first(content) - @test heading["t"] == "Header" - hlevel, attr, title = heading["c"] - @test hlevel == i - @test attr[1] == "h$i-heading" - @test title[1]["c"] == "heading" -end + @testset "Verbatim code" begin + s = """@code julia + using Norg + s = "*Hi there*" + json = norg(Norg.JSONTarget(), s) + @end + """ + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + cb = json["blocks"][1] + @test cb["t"] == "CodeBlock" + attr, content = cb["c"] + @test attr[2][1] == "julia" + @test content == + """using Norg\ns = "*Hi there*"\njson = norg(Norg.JSONTarget(), s)\n""" + end -nestable_lists = ['~'=>"OrderedList", '-'=>"BulletList", ">"=>"BlockQuote"] -@testset "$target nestable" for (m, target) in nestable_lists - s = """$m Hello, salute sinchero oon kydooke - $m Shintero yuo been na - $m Na sinchere fedicheda - """ - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - list = json["blocks"][1] - @test list["t"] == target -end + heading_levels = 1:6 + + @testset "Level $i heading" for i in heading_levels + s = """$(repeat("*", i)) 
heading + text + """ + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + container = json["blocks"][1] + @test container["t"] == "Div" + attr, content = container["c"] + @test first(attr) == "section-h$i-heading" + heading = first(content) + @test heading["t"] == "Header" + hlevel, attr, title = heading["c"] + @test hlevel == i + @test attr[1] == "h$i-heading" + @test title[1]["c"] == "heading" + end -@testset "inline link" begin - s = """""" - json_str = norg(JSONTarget(), s) - @test pandoc_approval(json_str) - json = JSON.parse(json_str) - p = json["blocks"][1] - @test length(p["c"]) == 1 - span = first(p["c"]) - @test span["t"] == "Span" - attrs, inlines = span["c"] - id = first(attrs) - @test id == "inline-link-target" -end + nestable_lists = ['~' => "OrderedList", '-' => "BulletList", ">" => "BlockQuote"] + @testset "$target nestable" for (m, target) in nestable_lists + s = """$m Hello, salute sinchero oon kydooke + $m Shintero yuo been na + $m Na sinchere fedicheda + """ + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + list = json["blocks"][1] + @test list["t"] == target + end -@testset "Parse the entire Norg spec without error." begin - s = open(Norg.NORG_SPEC_PATH) do f - read(f, String) + @testset "inline link" begin + s = """""" + json_str = norg(JSONTarget(), s) + @test pandoc_approval(json_str) + json = JSON.parse(json_str) + p = json["blocks"][1] + @test length(p["c"]) == 1 + span = first(p["c"]) + @test span["t"] == "Span" + attrs, inlines = span["c"] + id = first(attrs) + @test id == "inline-link-target" end - json = norg(JSONTarget(), s) - @test pandoc_approval(json) -end + @testset "Parse the entire Norg spec without error." begin + s = open(Norg.NORG_SPEC_PATH) do f + read(f, String) + end + json = norg(JSONTarget(), s) + @test pandoc_approval(json) + end end diff --git a/test/runtests.jl b/test/runtests.jl index 6d91d42..55d0f06 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -6,8 +6,12 @@ using Aqua import Norg: @K_str, kind, value @testset "Norg.jl" begin - @testset "scanners.jl" begin include("test_scanners.jl") end - @testset "Tokenize.jl" begin include("test_tokenize.jl") end + @testset "scanners.jl" begin + include("test_scanners.jl") + end + @testset "Tokenize.jl" begin + include("test_tokenize.jl") + end @testset "parser.jl" begin include("ast_tests/test_markup.jl") include("ast_tests/test_paragraphs.jl") @@ -20,12 +24,12 @@ import Norg: @K_str, kind, value include("ast_tests/test_detached_modifier_suffix.jl") # include("ast_tests/misc_bugs.jl") end - @testset "codegen.jl" begin - include("codegen_tests/html.jl") + @testset "codegen.jl" begin + include("codegen_tests/html.jl") include("codegen_tests/json.jl") end @testset "code analysis" begin include("code_analysis_tests/test_jet.jl") - Aqua.test_all(Norg) + include("code_analysis_tests/test_aqua.jl") end end diff --git a/test/test_scanners.jl b/test/test_scanners.jl index cc10e01..73e2cbb 100644 --- a/test/test_scanners.jl +++ b/test/test_scanners.jl @@ -1,36 +1,34 @@ @testset "Line ending tokens" begin - @test Norg.Scanners.scan(Norg.Scanners.LineEnding(), "\r\nfoo") |> - Norg.Scanners.success - @test Norg.Scanners.scan(Norg.Scanners.LineEnding(), "foo") |> - !(Norg.Scanners.success) + @test Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.LineEnding(), "\r\nfoo")) + @test !(Norg.Scanners.success)(Norg.Scanners.scan(Norg.Scanners.LineEnding(), "foo")) @test 
Norg.is_line_ending(Norg.Scanners.scan("\r\nfoo")) end @testset "Whitespace tokens" begin - @test Norg.Scanners.scan(Norg.Scanners.Whitespace(), " foo") |> - Norg.Scanners.success - @test Norg.Scanners.scan(Norg.Scanners.Whitespace(), "foo") |> - !Norg.Scanners.success + @test Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.Whitespace(), " foo")) + @test !Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.Whitespace(), "foo")) @test Norg.is_whitespace(Norg.Scanners.scan(" foo")) end @testset "Generic punctuation token" begin - @test Norg.Scanners.scan(Norg.Scanners.Punctuation(), - string(rand(Norg.Scanners.NORG_PUNCTUATION)) * - "foo") |> - Norg.Scanners.success - @test Norg.Scanners.scan(Norg.Scanners.Punctuation(), "foo") |> !Norg.Scanners.success - @test Norg.is_punctuation(Norg.Scanners.scan(string(rand(Norg.Scanners.NORG_PUNCTUATION)) * "foo")) + @test Norg.Scanners.success(Norg.Scanners.scan( + Norg.Scanners.Punctuation(), string(rand(Norg.Scanners.NORG_PUNCTUATION)) * "foo" + )) + @test !Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.Punctuation(), "foo")) + @test Norg.is_punctuation( + Norg.Scanners.scan(string(rand(Norg.Scanners.NORG_PUNCTUATION)) * "foo") + ) end -@testset "Single punctuation kind $kind" for kind in Norg.Kinds.all_single_punctuation_tokens() - @test Norg.Scanners.scan(kind, "$(kind)foo") |> Norg.Scanners.success +@testset "Single punctuation kind $kind" for kind in + Norg.Kinds.all_single_punctuation_tokens() + @test Norg.Scanners.success(Norg.Scanners.scan(kind, "$(kind)foo")) @test Norg.kind(Norg.Scanners.scan("$(kind)foo")) == kind - @test Norg.Scanners.scan(kind, "foo") |> !Norg.Scanners.success + @test !Norg.Scanners.success(Norg.Scanners.scan(kind, "foo")) end @testset "Word token" begin - @test Norg.Scanners.scan(Norg.Scanners.Word(), "foo") |> Norg.Scanners.success - @test Norg.Scanners.scan("foo") |> Norg.is_word - @test Norg.Scanners.scan(Norg.Scanners.Word(), "}foo") |> !Norg.Scanners.success + @test Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.Word(), "foo")) + @test Norg.is_word(Norg.Scanners.scan("foo")) + @test !Norg.Scanners.success(Norg.Scanners.scan(Norg.Scanners.Word(), "}foo")) end From 78a022a5db225f5c3d4db8d9b2ce7d62a3d5f31d Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Tue, 19 Sep 2023 07:33:20 +0200 Subject: [PATCH 7/9] Please gods of the CI, accept this humble offering. 
--- test/code_analysis_tests/test_jet.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index 4d6940f..7a6aea4 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -1,5 +1,5 @@ -@static if VERSION ≥ v"1.9" using JET, AbstractTrees +if VERSION ≥ v"1.9" @testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin payload = open(Norg.NORG_SPEC_PATH, "r") do f read(f, String) From 8de9f6a3611697d2e3428e7147b1b383a41561a5 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Tue, 19 Sep 2023 07:47:01 +0200 Subject: [PATCH 8/9] Remove JET tests for versions <= 1.9 --- Makefile | 15 +++++++++++++ format_project.jl | 32 ++++++++++++++++++++++++++++ test/code_analysis_tests/test_jet.jl | 2 -- test/runtests.jl | 4 +++- 4 files changed, 50 insertions(+), 3 deletions(-) create mode 100644 Makefile create mode 100644 format_project.jl diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..10e19a4 --- /dev/null +++ b/Makefile @@ -0,0 +1,15 @@ +JULIA=$(shell which julia) +TEST_PROCEDURE="import Pkg;Pkg.test()" + +format: + $(JULIA) format_project.jl + +test%: + $(JULIA) $* --project -e $(TEST_PROCEDURE) 2&> test-$*.log + +testall: test+lts test+beta test+release + +clean: + rm test*.log + +.PHONY: format, test, testall, clean diff --git a/format_project.jl b/format_project.jl new file mode 100644 index 0000000..c94d21c --- /dev/null +++ b/format_project.jl @@ -0,0 +1,32 @@ +using Pkg: Pkg +Pkg.add("JuliaFormatter") +using TOML, JuliaFormatter +format(".") +projecttoml = TOML.parsefile("Project.toml") +const _project_key_order = [ + "name", + "uuid", + "keywords", + "license", + "desc", + "deps", + "weakdeps", + "extensions", + "compat", + "extras", + "targets", +] +function project_key_order(key::String) + return something( + findfirst(x -> x == key, _project_key_order), length(_project_key_order) + 1 + ) +end + +function print_project(io, dict) + return TOML.print(io, dict; sorted=true, by=key -> (project_key_order(key), key)) +end + +open("Project.toml", "w") do io + @info "whoh" io + write(io, sprint(print_project, projecttoml)) +end diff --git a/test/code_analysis_tests/test_jet.jl b/test/code_analysis_tests/test_jet.jl index 7a6aea4..6600460 100644 --- a/test/code_analysis_tests/test_jet.jl +++ b/test/code_analysis_tests/test_jet.jl @@ -1,5 +1,4 @@ using JET, AbstractTrees -if VERSION ≥ v"1.9" @testset "JET.jl -> See https://aviatesk.github.io/JET.jl/stable/jetanalysis/#Errors-kinds-and-how-to-fix-them" begin payload = open(Norg.NORG_SPEC_PATH, "r") do f read(f, String) @@ -24,4 +23,3 @@ if VERSION ≥ v"1.9" JSONTarget(), ast ) end -end diff --git a/test/runtests.jl b/test/runtests.jl index 55d0f06..e59af3f 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -29,7 +29,9 @@ import Norg: @K_str, kind, value include("codegen_tests/json.jl") end @testset "code analysis" begin - include("code_analysis_tests/test_jet.jl") + if VERSION ≥ v"1.9" + include("code_analysis_tests/test_jet.jl") + end include("code_analysis_tests/test_aqua.jl") end end From 942bafd3c8466b318c00d35cc36841ef3b920030 Mon Sep 17 00:00:00 2001 From: Hugo Levy-Falk Date: Tue, 19 Sep 2023 07:51:37 +0200 Subject: [PATCH 9/9] Project.toml formatting cannot be consistent accross all Julia versions. 
T_T --- test/code_analysis_tests/test_aqua.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/code_analysis_tests/test_aqua.jl b/test/code_analysis_tests/test_aqua.jl index dd2a0fe..8705217 100644 --- a/test/code_analysis_tests/test_aqua.jl +++ b/test/code_analysis_tests/test_aqua.jl @@ -1,7 +1,7 @@ using Aqua @static if VERSION < v"1.9" - Aqua.test_all(Norg, ambiguities=false) + Aqua.test_all(Norg, ambiguities=false, project_toml_formatting=false) else Aqua.test_all(Norg) end
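
For anyone who wants to run these analyses interactively instead of through the test suite, here is a minimal REPL sketch mirroring the calls in test/code_analysis_tests/test_jet.jl as it stands after this series. It assumes that JET's interactive @report_call/@report_opt macros accept the same ignored_modules configuration as the @test_call/@test_opt macros used in the tests, and it reuses the norg/HTMLTarget/JSONTarget API exactly as the test files above do.

using JET, AbstractTrees, Norg

# Same payload the test suite analyses: the bundled Norg specification.
payload = read(Norg.NORG_SPEC_PATH, String)

# Error analysis (mirrors the @test_call checks).
@report_call ignored_modules=(AbstractTrees, Base) norg(payload)
ast = norg(payload)
@report_call ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast)
@report_call ignored_modules=(AbstractTrees, Base) Norg.codegen(JSONTarget(), ast)

# Optimization (type-stability) analysis (mirrors the @test_opt checks).
@report_opt ignored_modules=(AbstractTrees, Base) norg(payload)
@report_opt ignored_modules=(AbstractTrees, Base) Norg.codegen(HTMLTarget(), ast)

The JSON codegen optimization check is marked broken=true in the test suite, so the corresponding @report_opt call on Norg.codegen(JSONTarget(), ast) is still expected to produce reports on current versions.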