How far back can we take Elixir compatibility by removing then/1?
g-andrade committed Oct 28, 2023
1 parent 8c86cb4 commit 69b5b5b
Showing 6 changed files with 63 additions and 39 deletions.
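
Every code change below applies the same rewrite: `then/1` at the end of a pipe only exists since Elixir 1.12, so pipelines that ended in `|> then(...)` are unrolled into an explicit intermediate binding, which works on older releases. A minimal illustration of the equivalence (example code, not taken from the commit):

# `then/1` at the end of a pipe requires Elixir >= 1.12.
squared_sum =
  [1, 2, 3]
  |> Enum.sum()
  |> then(fn sum -> sum * sum end)

# Pre-1.12 equivalent: bind the pipeline's result, then apply the final step.
sum = Enum.sum([1, 2, 3])
squared_sum = sum * sum
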
12 changes: 12 additions & 0 deletions .github/workflows/ci.yml
@@ -23,6 +23,18 @@ jobs:
strategy:
matrix:
elixir_vsn: [
+ '1.0',
+ '1.1',
+ '1.2',
+ '1.3',
+ '1.4',
+ '1.5',
+ '1.6',
+ '1.7',
+ '1.8',
+ '1.9',
+ '1.10',
+ '1.11',
'1.12',
'1.13',
'1.14',
12 changes: 12 additions & 0 deletions .github/workflows/elixir_version_to_otp_version.json
@@ -1,4 +1,16 @@
{
+ "1.0": "18.3",
+ "1.1": "18.3",
+ "1.2": "19.3",
+ "1.3": "19.3",
+ "1.4": "19.3",
+ "1.5": "20.3",
+ "1.6": "21.3",
+ "1.7": "22.3",
+ "1.8": "22.3",
+ "1.9": "22.3",
+ "1.10": "23.3",
+ "1.11": "24.3",
"1.12": "24.3",
"1.13": "25.3",
"1.14": "26.1",
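
The new JSON entries pair each added Elixir series with an OTP release to run it on (1.0 and 1.1 on OTP 18.3, up to 1.11 on OTP 24.3), mirroring the versions added to the CI matrix above. As a rough, hypothetical sketch of how such a mapping can be consumed (the script below is illustrative and not part of the repository; it assumes Jason, which the project already uses in scripts/update_blocklist.exs):

# Hypothetical lookup, not part of this commit: resolve the OTP version
# paired with a given Elixir version from the mapping file.
elixir_vsn = "1.0"

otp_vsn =
  ".github/workflows/elixir_version_to_otp_version.json"
  |> File.read!()
  |> Jason.decode!()
  |> Map.fetch!(elixir_vsn)

IO.puts(otp_vsn)
# prints "18.3"
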
15 changes: 7 additions & 8 deletions lib/sqids/alphabet.ex
@@ -1,6 +1,6 @@
defmodule Sqids.Alphabet do
@moduledoc false
- # import ExUnit.Assertions
+ import ExUnit.Assertions

@min_alphabet_length 3

@@ -106,13 +106,12 @@ defmodule Sqids.Alphabet do
@spec index_of!(t(), byte) :: index
def index_of!(%{} = alphabet, char) do
# It would be nice to optimize this.
- alphabet
- |> Enum.find_value(fn {index, byte} ->
-   byte == char and index
- end)
- |> then(fn index when index != nil ->
-   index
- end)
+
+ index =
+   Enum.find_value(alphabet, fn {index, byte} -> byte == char and index end)
+
+ assert index !== nil
+ index
end

## Internal
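
In index_of!/2, the nil check used to ride on the `then/1` clause guard (`fn index when index != nil -> index end`), which raised FunctionClauseError when the character was missing; the rewritten code binds the result and calls `assert/1` from ExUnit.Assertions, which raises ExUnit.AssertionError on nil instead. A standalone sketch of the new shape (illustrative only, not the library module):

import ExUnit.Assertions

# Toy alphabet in the index-to-byte shape the shown code iterates over.
alphabet = %{0 => ?a, 1 => ?b, 2 => ?c}
char = ?c

index = Enum.find_value(alphabet, fn {index, byte} -> byte == char and index end)

# assert/1 raises ExUnit.AssertionError when the expression is falsy, so a
# missing character still crashes loudly, much like the old guard did.
assert index !== nil
IO.inspect(index)
# prints 2
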
26 changes: 13 additions & 13 deletions lib/sqids/blocklist.ex
@@ -72,19 +72,19 @@ defmodule Sqids.Blocklist do
alphabet_graphemes_downcased = alphabet_str |> String.downcase() |> String.graphemes() |> MapSet.new()
sort_fun = fn word -> {String.length(word), word} end

- words
- |> Enum.uniq()
- |> Enum.reduce(
-   _acc0 = %__MODULE__{min_word_length: min_word_length},
-   &maybe_add_blocklist_entry(&1, &2, alphabet_graphemes_downcased)
- )
- |> then(fn blocklist ->
-   %{
-     blocklist
-     | prefixes_and_suffixes: Enum.sort_by(blocklist.prefixes_and_suffixes, sort_fun),
-       matches_anywhere: Enum.sort_by(blocklist.matches_anywhere, sort_fun)
-   }
- end)
+ blocklist =
+   words
+   |> Enum.uniq()
+   |> Enum.reduce(
+     _acc0 = %__MODULE__{min_word_length: min_word_length},
+     &maybe_add_blocklist_entry(&1, &2, alphabet_graphemes_downcased)
+   )
+
+ %{
+   blocklist
+   | prefixes_and_suffixes: Enum.sort_by(blocklist.prefixes_and_suffixes, sort_fun),
+     matches_anywhere: Enum.sort_by(blocklist.matches_anywhere, sort_fun)
+ }
end

@spec maybe_add_blocklist_entry(String.t(), t(), MapSet.t(String.grapheme())) :: t()
1 change: 1 addition & 0 deletions mix.exs
@@ -27,6 +27,7 @@ defmodule Sqids.MixProject do
threshold: 94
]
],
+ dialyzer: [plt_add_apps: [:ex_unit]],
package: package()
]
end
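
Since lib/ code now calls ExUnit.Assertions macros outside of tests, the commit also adds :ex_unit to Dialyzer's PLT via dialyxir's `plt_add_apps` option, presumably so the expanded assert/refute calls resolve against known modules during analysis. A minimal sketch of where the option lives in a mix.exs (illustrative; the app name, version, and deps are placeholders, not the project's real configuration):

defmodule MyLib.MixProject do
  use Mix.Project

  def project do
    [
      app: :my_lib,
      version: "0.1.0",
      deps: [{:dialyxir, "~> 1.4", only: [:dev], runtime: false}],
      # Add :ex_unit to the PLT so code that uses ExUnit.Assertions at
      # runtime can be analysed without unknown-module warnings.
      dialyzer: [plt_add_apps: [:ex_unit]]
    ]
  end
end
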
36 changes: 18 additions & 18 deletions scripts/update_blocklist.exs
@@ -35,24 +35,24 @@ defmodule Sqids.BlocklistUpdater do
defp convert_from_canonical_list do
log_step("Converting canonical blocklist...")

- @path_of_canonical_json
- |> File.read!()
- |> Jason.decode!()
- |> :lists.usort()
- |> Enum.reduce(
-   _acc0 = "",
-   fn word, acc ->
-     refute match?(
-       {true, _},
-       {String.contains?(word, ["\n", "\r"]), word}
-     )
-
-     [acc, word, "\n"]
-   end
- )
- |> then(fn blocklist ->
-   File.write!(@path_of_txt_copy, blocklist)
- end)
+ blocklist =
+   @path_of_canonical_json
+   |> File.read!()
+   |> Jason.decode!()
+   |> :lists.usort()
+   |> Enum.reduce(
+     _acc0 = "",
+     fn word, acc ->
+       refute match?(
+         {true, _},
+         {String.contains?(word, ["\n", "\r"]), word}
+       )
+
+       [acc, word, "\n"]
+     end
+   )
+
+ File.write!(@path_of_txt_copy, blocklist)
end

defp maybe_update_changelog do