Fix some problems found by JET #129

Merged: 2 commits, Jun 22, 2021
src/Books.jl: 2 changes (1 addition & 1 deletion)

@@ -12,7 +12,7 @@ using Requires
 using pandoc_jll
 using pandoc_crossref_jll

-const PROJECT_ROOT = pkgdir(Books)
+const PROJECT_ROOT = string(pkgdir(Books))::String
 const GENERATED_DIR = "_gen"
 const DEFAULTS_DIR = joinpath(PROJECT_ROOT, "defaults")
 const BUILD_DIR = "_build"
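For context on this change (an explanatory note, not part of the PR): `pkgdir` returns `nothing` when the module was not loaded from a package directory, so its inferred type is `Union{Nothing, String}` and JET reports that `nothing` could reach `joinpath(PROJECT_ROOT, "defaults")`. A minimal sketch of the difference, assuming the Books module is loaded:

    using Books

    root = pkgdir(Books)                    # inferred as Union{Nothing, String}
    root = string(pkgdir(Books))::String    # converted and asserted, so later code sees String
    joinpath(root, "defaults")              # no possible-MethodError report from JET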
src/build.jl: 2 changes (1 addition & 1 deletion)

@@ -117,7 +117,7 @@ function html(; project="default", extra_head="")
     copy_extra_directories(project)
     url_prefix = is_ci() ? ci_url_prefix(project) : ""
     c = config(project, "contents")
-    write_html_pages(url_prefix, c, pandoc_html(project), extra_head)
+    write_html_pages(url_prefix, pandoc_html(project), extra_head)
 end

 """
src/generate.jl: 12 changes (6 additions & 6 deletions)

@@ -36,7 +36,7 @@ function extract_expr(s::AbstractString)::Vector
     codeblock_pattern = r"```jl\s*([\w\W]*?)```"
     matches = eachmatch(codeblock_pattern, s)
     function clean(m)
-        m = m[1]
+        m = m[1]::SubString{String}
         m = strip(m)
         m = string(m)::String
     end
@@ -158,11 +158,11 @@ function evaluate_and_write(f::Function)
 end

 """
-    evaluate_include(expr::String, M::Module, fail_on_error::Bool)
+    evaluate_include(expr::String, M, fail_on_error::Bool)

 For a `path` included in a Markdown file, run the corresponding function and write the output to `path`.
 """
-function evaluate_include(expr::String, M::Module, fail_on_error::Bool)
+function evaluate_include(expr::String, M, fail_on_error::Bool)
     if isnothing(M)
         # This code isn't really working.
         M = caller_module()
@@ -193,7 +193,7 @@ function expand_path(p)
 end

 """
-    gen(paths::Vector; M=nothing, fail_on_error=false, project="default")
+    gen(paths::Vector; M=Main, fail_on_error=false, project="default")

 Populate the files in `$(Books.GENERATED_DIR)/` by calling the required methods.
 These methods are specified by the filename and will output to that filename.
@@ -203,7 +203,7 @@ Otherwise, specify another module `M`.
 After calling the methods, this method will also call `html()` to update the site when
 `call_html == true`.
 """
-function gen(paths::Vector; M=nothing, fail_on_error=false, project="default", call_html=true)
+function gen(paths::Vector; M=Main, fail_on_error=false, project="default", call_html=true)
     mkpath(GENERATED_DIR)
     paths = [contains(dirname(p), "contents") ? p : expand_path(p) for p in paths]
     included_expr = vcat([extract_expr(read(path, String)) for path in paths]...)
@@ -216,7 +216,7 @@ function gen(paths::Vector; M=nothing, fail_on_error=false, project="default", c
 end
 gen(path::String; kwargs...) = gen([path]; kwargs...)

-function gen(; M=nothing, fail_on_error=false, project="default", call_html=true)
+function gen(; M=Main, fail_on_error=false, project="default", call_html=true)
     paths = inputs(project)
     first_file = first(paths)
     if !isfile(first_file)
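A usage sketch of the new `gen` default (an illustration, not code from the PR; the file name `contents/about.md` is hypothetical): with `M=Main`, the `jl` code blocks extracted from the Markdown sources are evaluated in `Main` unless another module is passed explicitly.

    using Books

    # Evaluate the `jl` code blocks of one file in Main (the new default):
    gen("contents/about.md")

    # Or evaluate them inside a specific module and skip the HTML rebuild:
    module MyBook end
    gen("contents/about.md"; M=MyBook, call_html=false)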
src/html.jl: 24 changes (12 additions & 12 deletions)

@@ -2,14 +2,14 @@ import YAML
 import URIs

 """
-    split_keepdelim(str::AbstractString, dlm::Regex)
+    split_keepdelim(str::AbstractString, delim::Regex)

 Split on regex while keeping the matches.
 Based on https://github.com/JuliaLang/julia/issues/20625#issuecomment-581585151.
 """
-function split_keepdelim(str::AbstractString, dlm::Regex)
-    dlm = string(dlm)[3:end-1]
-    rx = Regex("(?=$dlm)")
+function split_keepdelim(str::AbstractString, delim::Regex)
+    delim = string(delim)[3:end-1]
+    rx = Regex("(?=$delim)")
     split(str, rx)
 end
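To make the renamed helper concrete, a small illustration of `split_keepdelim` (mine, not from the PR): the regex is reduced to its source text by stripping the `r"` prefix and trailing `"`, then wrapped in a lookahead, so each chunk after the first starts with the delimiter.

    split_keepdelim("a<h1>b<h1>c", r"<h1>")
    # returns ["a", "<h1>b", "<h1>c"]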

@@ -79,7 +79,7 @@ function section_infos(text)
     for line in lines
         m = match(numbered_rx, line)
         if !isnothing(m)
-            number, id = m.captures
+            number, id = m.captures
             line_end = split(line, '>')[end-1]
             text = line_end[nextind(line_end, 0, 2):prevind(line_end, end, 4)]
             tuple = (num = number, id = id, text = lstrip(text))
@@ -116,7 +116,7 @@ end
 function html_href(text, link, level)
     threshold = 33
     if threshold < length(text)
-        shortened = text[1:threshold]
+        shortened = text[1:threshold]::String
         text = shortened * ".."
     end
     """<a class="menu-level-$level" href="$link">$text</a>"""
@@ -139,7 +139,7 @@ end

 Menu including numbered sections.
 """
-function add_menu(splitted=split_html())
+function add_menu(splitted)
     head, bodies, foot = splitted
     data = pandoc_metadata()
     title = data["title"]
@@ -224,7 +224,7 @@ function add_extra_head(head, extra_head::AbstractString)
     replace(head, before => after)
 end

-function html_pages(chs=chapters(), h=pandoc_html(), extra_head="")
+function html_pages(h, extra_head="")
     head, menu, bodies, foot = add_menu(split_html(h))
     head = add_extra_head(head, extra_head)
     ids_texts = html_page_name.(bodies)
@@ -247,7 +247,7 @@ function map_ids(names, pages)
         html = page
         matches = eachmatch(rx, html)
         for m in matches
-            capture = first(m.captures)
+            capture = first(m.captures)::SubString{String}
             if startswith(capture, "sec:")
                 key = '#' * capture
                 mapping[key] = name
@@ -269,7 +269,7 @@ function fix_links(names, pages, url_prefix)
     updated_pages = []
     function fix_page(name, page)
         function replace_match(s)
-            capture = first(match(rx, s).captures)
+            capture = first(match(rx, s).captures)::SubString{String}
             if startswith(capture, "#sec:")
                 page_link = mapping[capture]
                 return uncapture("$url_prefix/$page_link.html$capture")
@@ -290,9 +290,9 @@ function fix_links(names, pages, url_prefix)
     (names, fixed_pages)
 end

-function write_html_pages(url_prefix, chs=chapters(), h=pandoc_html(), extra_head="")
+function write_html_pages(url_prefix, h::AbstractString, extra_head="")
     h = fix_image_urls(h, url_prefix)
-    names, pages = html_pages(chs, h, extra_head)
+    names, pages = html_pages(h, extra_head)
     names, pages = fix_links(names, pages, url_prefix)
     for (i, (name, page)) in enumerate(zip(names, pages))
         name = i == 1 ? "index" : name
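An explanatory sketch of the pattern behind the `::SubString{String}` assertions in this file (my note, not text from the PR): `m.captures` of a `RegexMatch` has element type `Union{Nothing, SubString{String}}`, because any capture group may be absent, so JET reports that `startswith` could receive `nothing`. Asserting the element type where the capture is known to exist removes that report.

    m = match(r"href=\"(.*?)\"", "<a href=\"#sec:intro\">")
    capture = first(m.captures)                      # Union{Nothing, SubString{String}}
    capture = first(m.captures)::SubString{String}   # narrowed, so JET is satisfied
    startswith(capture, "#sec:")                     # true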