Files
cobblemon-ui/lib/cobblemon_ui/tier_list_scraper.ex
Alex Mickelson c5ebf5b9af
All checks were successful
Build and Deploy / Build & Push Image (push) Successful in 31s
scraping
2026-03-16 21:08:00 -06:00

174 lines
5.2 KiB
Elixir

defmodule CobblemonUi.TierListScraper do
  @moduledoc """
  GenServer that maintains a Pokemon tier list scraped from rankedboost.com.

  State is a map of `%{lowercase_name => tier}`. On startup the cached JSON
  file (`CACHE_DIR/pokemon_tier_list.json`) is loaded if present; otherwise an
  HTTP scrape is scheduled via a `:fetch` message. Successful scrapes are
  persisted back to the cache file; failed scrapes keep the existing state.
  """

  use GenServer
  require Logger

  @url "https://rankedboost.com/pokemon/tier-list/"
  @filename "pokemon_tier_list.json"

  # ---------------------------------------------------------------------------
  # Client API
  # ---------------------------------------------------------------------------

  def start_link(_opts) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @doc "Returns the current tier list map (%{lowercase_name => tier})."
  @spec get_tier_list() :: %{optional(String.t()) => String.t()}
  def get_tier_list do
    GenServer.call(__MODULE__, :get_tier_list)
  end

  @doc "Triggers a background re-scrape from HTTP."
  @spec refresh() :: :ok
  def refresh do
    GenServer.cast(__MODULE__, :refresh)
  end

  # ---------------------------------------------------------------------------
  # Server callbacks
  # ---------------------------------------------------------------------------

  @impl true
  def init(_opts) do
    tier_list = load_from_file()

    # load_from_file/0 already logs the loaded count on success, so only the
    # empty-cache case needs a log line here (fixes duplicate logging).
    if map_size(tier_list) == 0 do
      Logger.info("[TierListScraper] No cached data found, scheduling HTTP fetch")
      send(self(), :fetch)
    end

    {:ok, tier_list}
  end

  @impl true
  def handle_call(:get_tier_list, _from, state) do
    {:reply, state, state}
  end

  @impl true
  def handle_cast(:refresh, state) do
    Logger.info("[TierListScraper] Manual refresh requested")
    send(self(), :fetch)
    {:noreply, state}
  end

  @impl true
  def handle_info(:fetch, state) do
    Logger.info("[TierListScraper] Starting scrape from #{@url}")

    case run_scrape() do
      {:ok, tier_list} ->
        Logger.info("[TierListScraper] Updated state with #{map_size(tier_list)} pokemon")
        {:noreply, tier_list}

      {:error, reason} ->
        Logger.error("[TierListScraper] Scrape failed, keeping existing state: #{inspect(reason)}")
        {:noreply, state}
    end
  end

  # ---------------------------------------------------------------------------
  # Private scrape pipeline
  # ---------------------------------------------------------------------------

  # Fetch -> parse -> persist. Returns {:ok, tier_map} or {:error, reason};
  # every step is tagged-tuple so a failure never crashes the server.
  defp run_scrape do
    with {:ok, html} <- fetch_page(),
         {:ok, pokemon} <- parse(html),
         :ok <- write_json(pokemon) do
      tier_list = to_tier_map(pokemon)
      Logger.info("[TierListScraper] Successfully scraped and saved #{length(pokemon)} pokemon")
      {:ok, tier_list}
    else
      {:error, reason} = err ->
        Logger.error("[TierListScraper] Scrape failed: #{inspect(reason)}")
        err
    end
  end

  # GET the tier list page. Returns {:ok, html_body} | {:error, reason}.
  defp fetch_page do
    Logger.debug("[TierListScraper] Fetching #{@url}")

    case Req.get(@url, headers: [{"user-agent", "Mozilla/5.0 (compatible; CobblemonUI/1.0)"}]) do
      {:ok, %Req.Response{status: 200, body: body}} ->
        Logger.debug("[TierListScraper] Fetch OK, body size: #{byte_size(body)} bytes")
        {:ok, body}

      {:ok, %Req.Response{status: status}} ->
        Logger.warning("[TierListScraper] Unexpected HTTP status: #{status}")
        {:error, {:http_error, status}}

      {:error, err} ->
        Logger.error("[TierListScraper] HTTP request failed: #{inspect(err)}")
        {:error, err}
    end
  end

  # Extract [%{name: String.t(), tier: String.t()}] from the page HTML.
  # Uses the non-bang Floki.parse_document/1 so malformed HTML becomes
  # {:error, reason} (handled in run_scrape/0) instead of raising and
  # crashing the GenServer.
  defp parse(html) do
    with {:ok, document} <- Floki.parse_document(html) do
      bands = Floki.find(document, ".tierlist-band")
      Logger.debug("[TierListScraper] Found #{length(bands)} .tierlist-band nodes")

      case bands do
        [] ->
          Logger.warning("[TierListScraper] No .tierlist-band elements found — page structure may have changed")
          {:error, :no_pokemon_found}

        _ ->
          pokemon = Enum.flat_map(bands, &band_to_entries/1)
          Logger.info("[TierListScraper] Parsed #{length(pokemon)} pokemon across #{length(bands)} tiers")
          {:ok, pokemon}
      end
    end
  end

  # One %{name:, tier:} entry per .tierlist-card in a tier band; cards with a
  # missing or empty data-name attribute are dropped.
  defp band_to_entries(band) do
    tier = band |> Floki.attribute("data-tier") |> List.first() |> to_string() |> String.upcase()

    band
    |> Floki.find(".tierlist-card")
    |> Enum.flat_map(fn card ->
      case card |> Floki.attribute("data-name") |> List.first() do
        nil -> []
        "" -> []
        name -> [%{name: String.trim(name), tier: tier}]
      end
    end)
  end

  # Persist the scraped entries as pretty-printed JSON. Returns :ok or
  # {:error, reason}; the non-bang File.mkdir_p/1 lets a filesystem error
  # flow through run_scrape/0's `with` instead of raising mid-pipeline.
  defp write_json(data) do
    path = output_file()
    Logger.debug("[TierListScraper] Writing cache to #{path}")

    with :ok <- File.mkdir_p(Path.dirname(path)) do
      File.write(path, Jason.encode!(data, pretty: true))
    end
  end

  # Load the cached tier map from disk; any read/decode failure yields %{}.
  defp load_from_file do
    path = output_file()

    with {:ok, contents} <- File.read(path),
         {:ok, entries} <- Jason.decode(contents) do
      map = to_tier_map(entries)
      Logger.info("[TierListScraper] Loaded #{map_size(map)} pokemon from #{path}")
      map
    else
      {:error, :enoent} ->
        Logger.info("[TierListScraper] Cache file not found at #{path}")
        %{}

      {:error, reason} ->
        Logger.warning("[TierListScraper] Failed to load cache file: #{inspect(reason)}")
        %{}
    end
  end

  # Build %{lowercase_name => tier}. Accepts string keys (JSON cache) and
  # atom keys (freshly scraped). Malformed entries are skipped with a warning
  # instead of raising FunctionClauseError, so a corrupt-but-valid-JSON cache
  # file can no longer prevent the server from starting.
  defp to_tier_map(entries) do
    Enum.reduce(entries, %{}, fn
      %{"name" => name, "tier" => tier}, acc ->
        Map.put(acc, String.downcase(name), tier)

      %{name: name, tier: tier}, acc ->
        Map.put(acc, String.downcase(name), tier)

      other, acc ->
        Logger.warning("[TierListScraper] Skipping malformed entry: #{inspect(other)}")
        acc
    end)
  end

  # Cache file path; CACHE_DIR env var overrides the default of the cwd.
  defp output_file do
    dir = System.get_env("CACHE_DIR", ".")
    Path.join(dir, @filename)
  end
end