Rewrite fetch_pokeapi.py to read from local submodule instead of PokeAPI
Replace all pokebase API calls with local JSON file reads from the PokeAPI/api-data git submodule, removing the network dependency and the pokebase package. The script now runs on stdlib Python only.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
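A minimal standalone sketch of the read path this commit introduces (it mirrors the load_resource and extract_id helpers added in the diff below; the hard-coded repo-relative path and the example dex number are illustrative only, since the script derives its paths from its own file location):

import json
from pathlib import Path

# Illustrative: assumes the current working directory is the repository root.
POKEAPI_DIR = Path("data/pokeapi/data/api/v2")

def load_resource(endpoint: str, resource_id: int) -> dict:
    # Each resource in the api-data submodule is checked in as <endpoint>/<id>/index.json
    path = POKEAPI_DIR / endpoint / str(resource_id) / "index.json"
    with open(path) as f:
        return json.load(f)

def extract_id(url: str) -> int:
    # Cross-references in the JSON are PokeAPI URLs; the last path segment is the numeric ID
    return int(url.rstrip("/").split("/")[-1])

# Example usage: national dex 1 (Bulbasaur) and its types
poke = load_resource("pokemon", 1)
print(poke["name"], [t["type"]["name"] for t in poke["types"]])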
pyproject.toml
@@ -21,7 +21,6 @@ dev = [
     "pytest>=8.0.0",
     "pytest-asyncio>=0.25.0",
     "httpx>=0.28.0",
-    "pokebase>=1.4.0",
 ]
 
 [build-system]
fetch_pokeapi.py
@@ -1,14 +1,13 @@
-"""Fetch game data from PokeAPI and write static JSON seed files.
+"""Fetch game data from local PokeAPI submodule and write static JSON seed files.
 
-Uses pokebase which provides built-in file caching — first run fetches
-from the API, subsequent runs are instant from disk cache.
+Reads from the PokeAPI/api-data git submodule at data/pokeapi/ — no network
+access or container needed. Only uses Python stdlib.
 
 Usage:
-    # Against public PokeAPI (cached after first run):
-    podman compose exec -w /app/src api python -m app.seeds.fetch_pokeapi
+    python -m app.seeds.fetch_pokeapi
 
-    # Against local PokeAPI (no rate limits):
-    podman compose exec -w /app/src api python -m app.seeds.fetch_pokeapi --base-url http://pokeapi-app:8000
+Requires the submodule to be initialized:
+    git submodule update --init
 """
 
 import json
@@ -16,11 +15,23 @@ import re
 import sys
 from pathlib import Path
 
-import pokebase as pb
-import pokebase.common as pb_common
-
+REPO_ROOT = Path(__file__).parents[4]  # backend/src/app/seeds -> repo root
+POKEAPI_DIR = REPO_ROOT / "data" / "pokeapi" / "data" / "api" / "v2"
 DATA_DIR = Path(__file__).parent / "data"
+
 
+def load_resource(endpoint: str, resource_id: int) -> dict:
+    """Load a PokeAPI resource from the local submodule data."""
+    path = POKEAPI_DIR / endpoint / str(resource_id) / "index.json"
+    with open(path) as f:
+        return json.load(f)
+
+
+def extract_id(url: str) -> int:
+    """Extract the numeric ID from a PokeAPI URL (absolute or relative)."""
+    return int(url.rstrip("/").split("/")[-1])
+
+
 # Game definitions
 VERSION_GROUPS = {
     "firered-leafgreen": {
@@ -120,20 +131,20 @@ def clean_area_name(area_name: str, location_name: str) -> str | None:
 
 def get_encounters_for_area(area_id: int, version_name: str) -> list[dict]:
     """Get encounter data for a location area, filtered by version."""
-    area = pb.location_area(area_id)
+    area = load_resource("location-area", area_id)
     encounters = []
 
-    for pe in area.pokemon_encounters:
-        pokemon_url = pe.pokemon.url
-        dex_num = int(pokemon_url.rstrip("/").split("/")[-1])
-        pokemon_name = pe.pokemon.name
+    for pe in area["pokemon_encounters"]:
+        pokemon_url = pe["pokemon"]["url"]
+        dex_num = extract_id(pokemon_url)
+        pokemon_name = pe["pokemon"]["name"]
 
-        for vd in pe.version_details:
-            if vd.version.name != version_name:
+        for vd in pe["version_details"]:
+            if vd["version"]["name"] != version_name:
                 continue
 
-            for enc in vd.encounter_details:
-                method = enc.method.name
+            for enc in vd["encounter_details"]:
+                method = enc["method"]["name"]
                 if method not in INCLUDED_METHODS:
                     continue
 
@@ -141,9 +152,9 @@ def get_encounters_for_area(area_id: int, version_name: str) -> list[dict]:
                     "pokemon_name": pokemon_name,
                     "national_dex": dex_num,
                     "method": method,
-                    "chance": enc.chance,
-                    "min_level": enc.min_level,
-                    "max_level": enc.max_level,
+                    "chance": enc["chance"],
+                    "min_level": enc["min_level"],
+                    "max_level": enc["max_level"],
                 })
 
     return encounters
@@ -179,13 +190,13 @@ def process_version(version_name: str, vg_info: dict) -> list[dict]:
     """Process all locations for a specific game version."""
     print(f"\n--- Processing {version_name} ---")
 
-    region = pb.region(vg_info["region_id"])
-    location_refs = list(region.locations)
+    region = load_resource("region", vg_info["region_id"])
+    location_refs = list(region["locations"])
 
     # For HGSS, also include Kanto locations
     if version_name in ("heartgold", "soulsilver"):
-        kanto = pb.region(1)
-        location_refs = location_refs + list(kanto.locations)
+        kanto = load_resource("region", 1)
+        location_refs = location_refs + list(kanto["locations"])
 
     print(f" Found {len(location_refs)} locations")
 
@@ -193,12 +204,12 @@ def process_version(version_name: str, vg_info: dict) -> list[dict]:
     order = 1
 
     for loc_ref in location_refs:
-        loc_name = loc_ref.name
-        loc_id = int(loc_ref.url.rstrip("/").split("/")[-1])
+        loc_name = loc_ref["name"]
+        loc_id = extract_id(loc_ref["url"])
         display_name = clean_location_name(loc_name)
 
-        location = pb.location(loc_id)
-        areas = location.areas
+        location = load_resource("location", loc_id)
+        areas = location["areas"]
         if not areas:
             continue
 
@@ -206,8 +217,8 @@ def process_version(version_name: str, vg_info: dict) -> list[dict]:
         area_specific: dict[str, list[dict]] = {}
 
         for area_ref in areas:
-            area_id = int(area_ref.url.rstrip("/").split("/")[-1])
-            area_slug = area_ref.name
+            area_id = extract_id(area_ref["url"])
+            area_slug = area_ref["name"]
             area_suffix = clean_area_name(area_slug, loc_name)
 
             encounters = get_encounters_for_area(area_id, version_name)
@@ -261,11 +272,11 @@ def fetch_pokemon_data(dex_numbers: set[int]) -> list[dict]:
     dex_sorted = sorted(dex_numbers)
 
     for i, dex in enumerate(dex_sorted, 1):
-        poke = pb.pokemon(dex)
-        types = [t.type.name for t in poke.types]
+        poke = load_resource("pokemon", dex)
+        types = [t["type"]["name"] for t in poke["types"]]
        pokemon_list.append({
             "national_dex": dex,
-            "name": poke.name.title().replace("-", " "),
+            "name": poke["name"].title().replace("-", " "),
             "types": types,
             "sprite_url": f"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/{dex}.png",
         })
@@ -276,6 +287,152 @@ def fetch_pokemon_data(dex_numbers: set[int]) -> list[dict]:
     return sorted(pokemon_list, key=lambda x: x["national_dex"])
 
 
+def flatten_evolution_chain(chain: dict, seeded_dex: set[int]) -> list[dict]:
+    """Recursively flatten a PokeAPI evolution chain into (from, to) pairs."""
+    pairs = []
+    from_dex = int(chain["species"]["url"].rstrip("/").split("/")[-1])
+
+    for evo in chain.get("evolves_to", []):
+        to_dex = int(evo["species"]["url"].rstrip("/").split("/")[-1])
+
+        for detail in evo["evolution_details"]:
+            trigger = detail["trigger"]["name"]
+            min_level = detail.get("min_level")
+            item = detail.get("item")
+            if item:
+                item = item["name"]
+            held_item = detail.get("held_item")
+            if held_item:
+                held_item = held_item["name"]
+
+            # Collect other conditions as a string
+            conditions = []
+            if detail.get("min_happiness"):
+                conditions.append(f"happiness >= {detail['min_happiness']}")
+            if detail.get("min_affection"):
+                conditions.append(f"affection >= {detail['min_affection']}")
+            if detail.get("min_beauty"):
+                conditions.append(f"beauty >= {detail['min_beauty']}")
+            if detail.get("time_of_day"):
+                conditions.append(detail["time_of_day"])
+            if detail.get("known_move"):
+                conditions.append(f"knows {detail['known_move']['name']}")
+            if detail.get("known_move_type"):
+                conditions.append(f"knows {detail['known_move_type']['name']}-type move")
+            if detail.get("location"):
+                conditions.append(f"at {detail['location']['name']}")
+            if detail.get("party_species"):
+                conditions.append(f"with {detail['party_species']['name']} in party")
+            if detail.get("party_type"):
+                conditions.append(f"with {detail['party_type']['name']}-type in party")
+            if detail.get("gender") is not None:
+                conditions.append("female" if detail["gender"] == 1 else "male")
+            if detail.get("needs_overworld_rain"):
+                conditions.append("raining")
+            if detail.get("turn_upside_down"):
+                conditions.append("turn upside down")
+            if detail.get("trade_species"):
+                conditions.append(f"trade for {detail['trade_species']['name']}")
+            if detail.get("relative_physical_stats") is not None:
+                stat_map = {1: "atk > def", -1: "atk < def", 0: "atk = def"}
+                conditions.append(stat_map.get(detail["relative_physical_stats"], ""))
+
+            condition = ", ".join(conditions) if conditions else None
+
+            if from_dex in seeded_dex and to_dex in seeded_dex:
+                pairs.append({
+                    "from_national_dex": from_dex,
+                    "to_national_dex": to_dex,
+                    "trigger": trigger,
+                    "min_level": min_level,
+                    "item": item,
+                    "held_item": held_item,
+                    "condition": condition,
+                })
+
+        # Recurse into further evolutions
+        pairs.extend(flatten_evolution_chain(evo, seeded_dex))
+
+    return pairs
+
+
+def fetch_evolution_data(seeded_dex: set[int]) -> list[dict]:
+    """Fetch evolution chains from local PokeAPI data for all seeded pokemon."""
+    print(f"\n--- Fetching evolution chains ---")
+
+    # First, get the evolution chain URL for each pokemon species
+    chain_ids: set[int] = set()
+    dex_sorted = sorted(seeded_dex)
+
+    for i, dex in enumerate(dex_sorted, 1):
+        species = load_resource("pokemon-species", dex)
+        chain_url = species["evolution_chain"]["url"]
+        chain_id = extract_id(chain_url)
+        chain_ids.add(chain_id)
+        if i % 50 == 0 or i == len(dex_sorted):
+            print(f" Species fetched: {i}/{len(dex_sorted)}")
+
+    print(f" Found {len(chain_ids)} unique evolution chains")
+
+    # Fetch each chain and flatten
+    all_pairs: list[dict] = []
+    seen: set[tuple[int, int, str]] = set()
+
+    for chain_id in sorted(chain_ids):
+        chain = load_resource("evolution-chain", chain_id)
+        pairs = flatten_evolution_chain(chain["chain"], seeded_dex)
+        for p in pairs:
+            key = (p["from_national_dex"], p["to_national_dex"], p["trigger"])
+            if key not in seen:
+                seen.add(key)
+                all_pairs.append(p)
+
+    print(f" Total evolution pairs: {len(all_pairs)}")
+    return sorted(all_pairs, key=lambda x: (x["from_national_dex"], x["to_national_dex"]))
+
+
+def apply_evolution_overrides(evolutions: list[dict]) -> None:
+    """Apply overrides from evolution_overrides.json if it exists."""
+    overrides_path = DATA_DIR / "evolution_overrides.json"
+    if not overrides_path.exists():
+        return
+
+    with open(overrides_path) as f:
+        overrides = json.load(f)
+
+    # Remove entries
+    for removal in overrides.get("remove", []):
+        evolutions[:] = [
+            e for e in evolutions
+            if not (e["from_national_dex"] == removal["from_dex"]
+                    and e["to_national_dex"] == removal["to_dex"])
+        ]
+
+    # Add entries
+    for addition in overrides.get("add", []):
+        evolutions.append({
+            "from_national_dex": addition["from_dex"],
+            "to_national_dex": addition["to_dex"],
+            "trigger": addition.get("trigger", "level-up"),
+            "min_level": addition.get("min_level"),
+            "item": addition.get("item"),
+            "held_item": addition.get("held_item"),
+            "condition": addition.get("condition"),
+        })
+
+    # Modify entries
+    for mod in overrides.get("modify", []):
+        for e in evolutions:
+            if (e["from_national_dex"] == mod["from_dex"]
+                    and e["to_national_dex"] == mod["to_dex"]):
+                for key, value in mod.get("set", {}).items():
+                    e[key] = value
+
+    # Re-sort
+    evolutions.sort(key=lambda x: (x["from_national_dex"], x["to_national_dex"]))
+    print(f" Applied overrides: {len(evolutions)} pairs after overrides")
+
+
 def write_json(filename: str, data):
     path = DATA_DIR / filename
     with open(path, "w") as f:
@@ -284,14 +441,13 @@ def write_json(filename: str, data):
 
 
 def main():
-    # Check for custom base URL
-    if "--base-url" in sys.argv:
-        idx = sys.argv.index("--base-url")
-        base_url = sys.argv[idx + 1]
-        pb_common.BASE_URL = base_url + "/api/v2"
-        print(f"Using custom PokeAPI: {base_url}")
-    else:
-        print("Using public PokeAPI (pokebase caches to disk after first fetch)")
+    if not POKEAPI_DIR.is_dir():
+        print(
+            f"Error: PokeAPI data not found at {POKEAPI_DIR}\n"
+            "Initialize the submodule with: git submodule update --init",
+            file=sys.stderr,
+        )
+        sys.exit(1)
 
     DATA_DIR.mkdir(parents=True, exist_ok=True)
 
@@ -321,6 +477,12 @@ def main():
     write_json("pokemon.json", pokemon)
     print(f"\nWrote {len(pokemon)} Pokemon to pokemon.json")
 
+    # Fetch evolution chains
+    evolutions = fetch_evolution_data(all_pokemon_dex)
+    apply_evolution_overrides(evolutions)
+    write_json("evolutions.json", evolutions)
+    print(f"\nWrote {len(evolutions)} evolution pairs to evolutions.json")
+
     print("\nDone! JSON files written to seeds/data/")
     print("Review route ordering and curate as needed.")
 
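For curation, apply_evolution_overrides (added above) reads an optional evolution_overrides.json from the script's seeds/data directory (DATA_DIR), with "remove", "add", and "modify" lists. A hypothetical sketch of that file's shape, generated from Python so the key names stay aligned with the code above (the dex numbers, levels, and trigger values are invented for illustration; only the key names come from the function):

import json
from pathlib import Path

# Hypothetical example data; every key mirrors what apply_evolution_overrides reads.
overrides = {
    "remove": [
        {"from_dex": 64, "to_dex": 65},  # drop a (from, to) pair entirely
    ],
    "add": [
        {"from_dex": 64, "to_dex": 65, "trigger": "level-up", "min_level": 40},
    ],
    "modify": [
        {"from_dex": 1, "to_dex": 2, "set": {"min_level": 16, "condition": None}},
    ],
}

# Write it next to the generated seed JSON (the script looks in DATA_DIR).
Path("evolution_overrides.json").write_text(json.dumps(overrides, indent=2) + "\n")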