Add Go-based PokeAPI fetch tool

Replaces the Python fetch_pokeapi.py script with a Go tool that crawls
a local PokeAPI instance and writes seed JSON files. Supports caching
and special encounter definitions via JSON config.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-07 19:44:05 +01:00
parent ab6c1adb1f
commit 0bf628157f
9 changed files with 1575 additions and 0 deletions

View File

@@ -0,0 +1,106 @@
package main
import (
"context"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"strings"
"time"
)
// Client is an HTTP client for the PokeAPI with disk caching and concurrency limiting.
type Client struct {
	baseURL    string        // API root, stored without a trailing slash
	httpClient *http.Client  // shared client; NewClient sets a 2-minute timeout
	cacheDir   string        // directory holding cached JSON responses, one file per endpoint
	sem        chan struct{} // concurrency limiter: one buffered slot per allowed in-flight request
}
// NewClient creates a new PokeAPI client. Any trailing slash on baseURL is
// stripped; concurrency bounds the number of simultaneous HTTP requests.
func NewClient(baseURL, cacheDir string, concurrency int) *Client {
	client := &Client{
		baseURL:  strings.TrimRight(baseURL, "/"),
		cacheDir: cacheDir,
		sem:      make(chan struct{}, concurrency),
	}
	client.httpClient = &http.Client{Timeout: 2 * time.Minute}
	return client
}
// Get fetches the given endpoint, using disk cache when available.
// Cache hits are served directly from disk without consuming a semaphore
// slot. Cache misses acquire the concurrency limiter, perform up to three
// HTTP attempts with exponential backoff (transport errors and 5xx retry;
// 4xx fail immediately), and write the successful body to the cache.
func (c *Client) Get(ctx context.Context, endpoint string) ([]byte, error) {
	// Check cache first (no semaphore needed for disk reads).
	safeName := strings.NewReplacer("/", "_", "?", "_").Replace(endpoint) + ".json"
	cachePath := filepath.Join(c.cacheDir, safeName)
	if data, err := os.ReadFile(cachePath); err == nil {
		return data, nil
	}
	// Acquire semaphore for the HTTP request, honoring cancellation while
	// waiting for a slot.
	select {
	case c.sem <- struct{}{}:
		defer func() { <-c.sem }()
	case <-ctx.Done():
		return nil, ctx.Err()
	}
	url := c.baseURL + "/" + endpoint
	var data []byte
	maxRetries := 3
	for attempt := range maxRetries {
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
		if err != nil {
			return nil, fmt.Errorf("creating request for %s: %w", endpoint, err)
		}
		resp, err := c.httpClient.Do(req)
		if err != nil {
			if attempt < maxRetries-1 {
				// Transport-level failure: back off and retry, but abort
				// promptly if the context is canceled instead of sleeping
				// through the cancellation.
				if werr := c.waitBackoff(ctx, attempt); werr != nil {
					return nil, werr
				}
				continue
			}
			return nil, fmt.Errorf("fetching %s: %w", endpoint, err)
		}
		body, readErr := io.ReadAll(resp.Body)
		resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			// Only server-side (5xx) errors are retried; 4xx are permanent.
			if attempt < maxRetries-1 && resp.StatusCode >= 500 {
				if werr := c.waitBackoff(ctx, attempt); werr != nil {
					return nil, werr
				}
				continue
			}
			return nil, fmt.Errorf("fetching %s: status %d", endpoint, resp.StatusCode)
		}
		if readErr != nil {
			return nil, fmt.Errorf("reading response for %s: %w", endpoint, readErr)
		}
		data = body
		break
	}
	// Write to cache so subsequent runs skip the network entirely.
	if err := os.MkdirAll(c.cacheDir, 0o755); err != nil {
		return nil, fmt.Errorf("creating cache dir: %w", err)
	}
	if err := os.WriteFile(cachePath, data, 0o644); err != nil {
		return nil, fmt.Errorf("writing cache for %s: %w", endpoint, err)
	}
	return data, nil
}

// waitBackoff sleeps for 2^attempt seconds or until ctx is canceled,
// whichever comes first, returning ctx.Err() on cancellation.
func (c *Client) waitBackoff(ctx context.Context, attempt int) error {
	timer := time.NewTimer(time.Duration(1<<uint(attempt)) * time.Second)
	defer timer.Stop()
	select {
	case <-timer.C:
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}
// ClearCache removes the cache directory and all cached responses in it.
// A nonexistent directory is not an error (os.RemoveAll semantics).
func (c *Client) ClearCache() error {
	return os.RemoveAll(c.cacheDir)
}

View File

@@ -0,0 +1,271 @@
package main
import (
"context"
"encoding/json"
"fmt"
"os"
"sort"
"strings"
"sync"
)
// fetchEvolutionData fetches evolution chains and returns flattened pairs.
//
// It derives the set of distinct evolution-chain IDs from the seeded
// species, fetches each chain concurrently (actual parallelism is bounded
// by the client's semaphore), flattens every chain into (from, to, trigger)
// pairs, de-duplicates them, and returns the result sorted by
// (from, to) PokeAPI ID.
func fetchEvolutionData(
	ctx context.Context,
	client *Client,
	speciesData map[int]*SpeciesResp,
	seededDex map[int]bool,
) ([]EvolutionOutput, error) {
	fmt.Println("\n--- Fetching evolution chains ---")
	// Extract unique chain IDs from species data; only seeded species count.
	chainIDSet := make(map[int]bool)
	for sid, species := range speciesData {
		if seededDex[sid] {
			chainIDSet[species.EvolutionChain.ID()] = true
		}
	}
	chainIDs := make([]int, 0, len(chainIDSet))
	for id := range chainIDSet {
		chainIDs = append(chainIDs, id)
	}
	// Sorted so fetch order and first-seen dedupe order are deterministic.
	sort.Ints(chainIDs)
	fmt.Printf(" Found %d unique evolution chains\n", len(chainIDs))
	// Fetch chains concurrently. Each goroutine writes only its own index of
	// results/errs, so no locking is required.
	type chainResult struct {
		chain EvolutionChainResp
	}
	results := make([]chainResult, len(chainIDs))
	var wg sync.WaitGroup
	errs := make([]error, len(chainIDs))
	for i, cid := range chainIDs {
		wg.Add(1)
		go func(i, cid int) {
			defer wg.Done()
			data, err := client.Get(ctx, fmt.Sprintf("evolution-chain/%d", cid))
			if err != nil {
				errs[i] = err
				return
			}
			if err := json.Unmarshal(data, &results[i].chain); err != nil {
				errs[i] = fmt.Errorf("parsing evolution chain %d: %w", cid, err)
			}
		}(i, cid)
	}
	wg.Wait()
	// Report the first (lowest chain index) error, if any.
	for _, err := range errs {
		if err != nil {
			return nil, err
		}
	}
	// Flatten all chains, de-duplicating on (from, to, trigger): multiple
	// evolution details for the same pair collapse to the first one seen.
	var allPairs []EvolutionOutput
	type dedupeKey struct {
		from, to int
		trigger string
	}
	seen := make(map[dedupeKey]bool)
	for _, r := range results {
		pairs := flattenChain(r.chain.Chain, seededDex)
		for _, p := range pairs {
			key := dedupeKey{p.FromPokeAPIID, p.ToPokeAPIID, p.Trigger}
			if !seen[key] {
				seen[key] = true
				allPairs = append(allPairs, p)
			}
		}
	}
	sort.Slice(allPairs, func(i, j int) bool {
		if allPairs[i].FromPokeAPIID != allPairs[j].FromPokeAPIID {
			return allPairs[i].FromPokeAPIID < allPairs[j].FromPokeAPIID
		}
		return allPairs[i].ToPokeAPIID < allPairs[j].ToPokeAPIID
	})
	fmt.Printf(" Total evolution pairs: %d\n", len(allPairs))
	return allPairs, nil
}
// flattenChain recursively flattens an evolution chain into (from, to) pairs.
// Only pairs where both endpoints appear in seededDex are emitted; each
// evolution detail for a pair yields its own output entry.
func flattenChain(chain ChainLink, seededDex map[int]bool) []EvolutionOutput {
	from := chain.Species.ID()
	var out []EvolutionOutput
	for _, next := range chain.EvolvesTo {
		to := next.Species.ID()
		bothSeeded := seededDex[from] && seededDex[to]
		for _, detail := range next.EvolutionDetails {
			if !bothSeeded {
				continue
			}
			pair := EvolutionOutput{
				FromPokeAPIID: from,
				ToPokeAPIID:   to,
				Trigger:       detail.Trigger.Name,
			}
			// Optional fields are copied into fresh locals so each pair
			// owns its own pointers.
			if detail.MinLevel != nil {
				lvl := *detail.MinLevel
				pair.MinLevel = &lvl
			}
			if detail.Item != nil {
				itemName := detail.Item.Name
				pair.Item = &itemName
			}
			if detail.HeldItem != nil {
				heldName := detail.HeldItem.Name
				pair.HeldItem = &heldName
			}
			if conds := CollectEvolutionConditions(detail); len(conds) > 0 {
				joined := strings.Join(conds, ", ")
				pair.Condition = &joined
			}
			out = append(out, pair)
		}
		// Recurse into the next stage of the chain.
		out = append(out, flattenChain(next, seededDex)...)
	}
	return out
}
// EvolutionOverrides represents the evolution_overrides.json structure.
// applyEvolutionOverrides applies the three sections in order:
// Remove deletes pairs, Add appends pairs, Modify patches existing pairs.
type EvolutionOverrides struct {
	// Remove lists (from, to) PokeAPI ID pairs to drop entirely.
	Remove []struct {
		FromDex int `json:"from_dex"`
		ToDex int `json:"to_dex"`
	} `json:"remove"`
	// Add lists fully-specified pairs to append. An empty Trigger
	// defaults to "level-up" when applied.
	Add []struct {
		FromDex int `json:"from_dex"`
		ToDex int `json:"to_dex"`
		Trigger string `json:"trigger"`
		MinLevel *int `json:"min_level"`
		Item *string `json:"item"`
		HeldItem *string `json:"held_item"`
		Condition *string `json:"condition"`
	} `json:"add"`
	// Modify patches every pair matching (FromDex, ToDex). Set keys are
	// field names: "trigger", "min_level", "item", "held_item",
	// "condition"; a JSON null clears the optional fields.
	Modify []struct {
		FromDex int `json:"from_dex"`
		ToDex int `json:"to_dex"`
		Set map[string]interface{} `json:"set"`
	} `json:"modify"`
}
// applyEvolutionOverrides applies overrides from evolution_overrides.json.
// The file is optional: if it does not exist the input is returned
// unchanged. Overrides run in three phases — remove, add, modify — and the
// result is re-sorted by (from, to) PokeAPI ID.
func applyEvolutionOverrides(evolutions []EvolutionOutput, overridesPath string) ([]EvolutionOutput, error) {
	data, err := os.ReadFile(overridesPath)
	if err != nil {
		if os.IsNotExist(err) {
			// No overrides file: nothing to do.
			return evolutions, nil
		}
		return nil, fmt.Errorf("reading evolution overrides: %w", err)
	}
	var overrides EvolutionOverrides
	if err := json.Unmarshal(data, &overrides); err != nil {
		return nil, fmt.Errorf("parsing evolution overrides: %w", err)
	}
	// Remove entries in a single in-place filtering pass (set lookup) rather
	// than re-scanning the whole slice once per removal.
	if len(overrides.Remove) > 0 {
		type pair struct{ from, to int }
		removeSet := make(map[pair]bool, len(overrides.Remove))
		for _, removal := range overrides.Remove {
			removeSet[pair{removal.FromDex, removal.ToDex}] = true
		}
		filtered := evolutions[:0] // reuse backing array
		for _, e := range evolutions {
			if !removeSet[pair{e.FromPokeAPIID, e.ToPokeAPIID}] {
				filtered = append(filtered, e)
			}
		}
		evolutions = filtered
	}
	// Add entries; an unspecified trigger defaults to "level-up".
	for _, addition := range overrides.Add {
		trigger := addition.Trigger
		if trigger == "" {
			trigger = "level-up"
		}
		evolutions = append(evolutions, EvolutionOutput{
			FromPokeAPIID: addition.FromDex,
			ToPokeAPIID:   addition.ToDex,
			Trigger:       trigger,
			MinLevel:      addition.MinLevel,
			Item:          addition.Item,
			HeldItem:      addition.HeldItem,
			Condition:     addition.Condition,
		})
	}
	// Modify entries: each "set" key patches one field of every matching
	// pair. JSON numbers decode as float64; explicit JSON nulls clear the
	// optional (pointer) fields. Unknown keys and wrong-typed values are
	// ignored, matching the original lenient behavior.
	for _, mod := range overrides.Modify {
		for i := range evolutions {
			e := &evolutions[i]
			if e.FromPokeAPIID != mod.FromDex || e.ToPokeAPIID != mod.ToDex {
				continue
			}
			for key, value := range mod.Set {
				switch key {
				case "trigger":
					if s, ok := value.(string); ok {
						e.Trigger = s
					}
				case "min_level":
					if v, ok := value.(float64); ok {
						level := int(v)
						e.MinLevel = &level
					} else if value == nil {
						e.MinLevel = nil
					}
				case "item":
					if s, ok := value.(string); ok {
						e.Item = &s
					} else if value == nil {
						e.Item = nil
					}
				case "held_item":
					if s, ok := value.(string); ok {
						e.HeldItem = &s
					} else if value == nil {
						e.HeldItem = nil
					}
				case "condition":
					if s, ok := value.(string); ok {
						e.Condition = &s
					} else if value == nil {
						e.Condition = nil
					}
				}
			}
		}
	}
	// Re-sort so additions land in canonical (from, to) order.
	sort.Slice(evolutions, func(i, j int) bool {
		if evolutions[i].FromPokeAPIID != evolutions[j].FromPokeAPIID {
			return evolutions[i].FromPokeAPIID < evolutions[j].FromPokeAPIID
		}
		return evolutions[i].ToPokeAPIID < evolutions[j].ToPokeAPIID
	})
	fmt.Printf(" Applied overrides: %d pairs after overrides\n", len(evolutions))
	return evolutions, nil
}

View File

@@ -0,0 +1,3 @@
module nuzlocke-tracker/tools/fetch-pokeapi
go 1.22

256
tools/fetch-pokeapi/main.go Normal file
View File

@@ -0,0 +1,256 @@
package main
import (
"context"
"encoding/json"
"flag"
"fmt"
"os"
"path/filepath"
)
// Config structs for version_groups.json

// VersionGroupInfo describes one version group: the game versions it
// contains, its generation, and the region(s) whose locations are crawled.
type VersionGroupInfo struct {
	Versions []string `json:"versions"` // PokeAPI version slugs in this group
	Generation int `json:"generation"`
	Region string `json:"region"` // display name of the primary region
	RegionID int `json:"region_id"` // PokeAPI region ID to crawl
	ExtraRegions []int `json:"extra_regions"` // additional region IDs whose locations are merged in
	Games map[string]GameInfo `json:"games"`
}

// GameInfo holds display metadata for a single game within a version group.
type GameInfo struct {
	Name string `json:"name"`
	Slug string `json:"slug"`
	ReleaseYear int `json:"release_year"`
	Color *string `json:"color"` // optional UI color; nil when unset
}

// Config structs for route_order.json

// RouteOrderFile maps a version-group key to its curated route progression
// list; Aliases lets one key reuse another key's route list.
type RouteOrderFile struct {
	Routes map[string][]string `json:"routes"`
	Aliases map[string]string `json:"aliases"`
}

// Config structs for special_encounters.json

// SpecialEncountersFile maps a version-group key to per-location manually
// curated encounters; Aliases lets one key reuse another key's table.
type SpecialEncountersFile struct {
	Encounters map[string]map[string][]EncounterOutput `json:"encounters"`
	Aliases map[string]string `json:"aliases"`
}
// getSpecialEncounters resolves the special-encounter table for a version
// group key, following at most one level of alias indirection. It returns
// nil when the config is absent or contains no entry for the key.
func getSpecialEncounters(se *SpecialEncountersFile, vgKey string) map[string][]EncounterOutput {
	if se == nil {
		return nil
	}
	key := vgKey
	if _, direct := se.Encounters[key]; !direct {
		alias, hasAlias := se.Aliases[key]
		if !hasAlias {
			return nil
		}
		key = alias
	}
	if table, ok := se.Encounters[key]; ok {
		return table
	}
	return nil
}
// main drives the full fetch pipeline: connectivity check, config loading,
// per-version route crawling, Pokemon/species fetching, and evolution-chain
// flattening. All outputs are JSON files under <seeds>/data/.
func main() {
	clearCache := flag.Bool("clear-cache", false, "Delete cached API responses before fetching")
	flag.Parse()
	// POKEAPI_URL overrides the default local server address.
	pokeapiURL := os.Getenv("POKEAPI_URL")
	if pokeapiURL == "" {
		pokeapiURL = "http://localhost:8000/api/v2"
	}
	// Resolve paths relative to this tool's location or use the standard layout
	seedsDir := findSeedsDir()
	dataDir := filepath.Join(seedsDir, "data")
	cacheDir := filepath.Join(seedsDir, ".pokeapi_cache")
	// Up to 50 concurrent requests against the (assumed local) API.
	client := NewClient(pokeapiURL, cacheDir, 50)
	if *clearCache {
		if err := client.ClearCache(); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: could not clear cache: %v\n", err)
		} else {
			fmt.Println("Cleared API cache.")
		}
	}
	ctx := context.Background()
	// Connectivity check: one cheap request before doing any real work.
	fmt.Printf("Connecting to PokeAPI at %s...\n", pokeapiURL)
	if _, err := client.Get(ctx, "pokemon-species/1"); err != nil {
		fmt.Fprintf(os.Stderr, "Error: Cannot connect to PokeAPI at %s\n %v\nStart the local PokeAPI server or set POKEAPI_URL.\n", pokeapiURL, err)
		os.Exit(1)
	}
	// Load configs: version_groups.json and route_order.json are required;
	// special_encounters.json is optional (warn and continue with nil).
	versionGroups, err := loadJSON[map[string]VersionGroupInfo](filepath.Join(seedsDir, "version_groups.json"))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error loading version_groups.json: %v\n", err)
		os.Exit(1)
	}
	routeOrder, err := loadRouteOrder(filepath.Join(seedsDir, "route_order.json"))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error loading route_order.json: %v\n", err)
		os.Exit(1)
	}
	specialEnc, err := loadJSON[SpecialEncountersFile](filepath.Join(seedsDir, "special_encounters.json"))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Warning: could not load special_encounters.json: %v\n", err)
		// Continue without special encounters (specialEnc stays nil).
	}
	if err := os.MkdirAll(dataDir, 0o755); err != nil {
		fmt.Fprintf(os.Stderr, "Error creating data dir: %v\n", err)
		os.Exit(1)
	}
	// Build games.json from the version-group config.
	// NOTE(review): ranging over *versionGroups iterates in random map order,
	// so the row order inside games.json can differ between runs — confirm
	// whether deterministic output is wanted here.
	var games []GameOutput
	for _, vgInfo := range *versionGroups {
		for _, gameInfo := range vgInfo.Games {
			games = append(games, GameOutput{
				Name: gameInfo.Name,
				Slug: gameInfo.Slug,
				Generation: vgInfo.Generation,
				Region: vgInfo.Region,
				ReleaseYear: gameInfo.ReleaseYear,
				Color: gameInfo.Color,
			})
		}
	}
	writeJSON(filepath.Join(dataDir, "games.json"), games)
	fmt.Printf("Wrote %d games to games.json\n", len(games))
	// Process each version; each produces one <version>.json of routes and
	// encounters, and feeds encountered PokeAPI IDs into the collector.
	// NOTE(review): same map-order caveat — versions are processed in random
	// order across runs (per-file content is unaffected).
	pokeIDCollector := NewPokeIDCollector()
	for vgKey, vgInfo := range *versionGroups {
		for _, verName := range vgInfo.Versions {
			routes, err := processVersion(ctx, client, verName, vgInfo, vgKey, routeOrder, specialEnc, pokeIDCollector)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Error processing %s: %v\n", verName, err)
				os.Exit(1)
			}
			writeJSON(filepath.Join(dataDir, verName+".json"), routes)
		}
	}
	// Fetch all species data (reused for pokemon discovery + evolutions)
	speciesData, err := fetchAllSpecies(ctx, client)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error fetching species: %v\n", err)
		os.Exit(1)
	}
	// Fetch all Pokemon (base + all forms)
	pokemonList, err := fetchAllPokemon(ctx, client, speciesData, pokeIDCollector.IDs())
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error fetching pokemon: %v\n", err)
		os.Exit(1)
	}
	writeJSON(filepath.Join(dataDir, "pokemon.json"), pokemonList)
	fmt.Printf("\nWrote %d Pokemon to pokemon.json\n", len(pokemonList))
	// Build set of all seeded PokeAPI IDs for evolution filtering
	allSeededDex := make(map[int]bool)
	for _, p := range pokemonList {
		allSeededDex[p.PokeAPIID] = true
	}
	// Fetch evolution chains, then apply manual overrides.
	// NOTE(review): overrides are read from dataDir (the generated output
	// directory) while every other config lives in seedsDir — confirm the
	// evolution_overrides.json location is intentional.
	evolutions, err := fetchEvolutionData(ctx, client, speciesData, allSeededDex)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error fetching evolutions: %v\n", err)
		os.Exit(1)
	}
	evolutions, err = applyEvolutionOverrides(evolutions, filepath.Join(dataDir, "evolution_overrides.json"))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error applying evolution overrides: %v\n", err)
		os.Exit(1)
	}
	writeJSON(filepath.Join(dataDir, "evolutions.json"), evolutions)
	fmt.Printf("\nWrote %d evolution pairs to evolutions.json\n", len(evolutions))
	fmt.Println("\nDone! JSON files written to seeds/data/")
	fmt.Println("Review route ordering and curate as needed.")
}
// findSeedsDir locates the backend/src/app/seeds directory.
func findSeedsDir() string {
// Try relative to CWD (from repo root)
candidates := []string{
"backend/src/app/seeds",
"../../backend/src/app/seeds", // from tools/fetch-pokeapi/
}
for _, c := range candidates {
if _, err := os.Stat(filepath.Join(c, "version_groups.json")); err == nil {
abs, _ := filepath.Abs(c)
return abs
}
}
// Fallback: try to find from executable location
exe, _ := os.Executable()
exeDir := filepath.Dir(exe)
rel := filepath.Join(exeDir, "../../backend/src/app/seeds")
if _, err := os.Stat(filepath.Join(rel, "version_groups.json")); err == nil {
abs, _ := filepath.Abs(rel)
return abs
}
// Default
abs, _ := filepath.Abs("backend/src/app/seeds")
return abs
}
func loadJSON[T any](path string) (*T, error) {
data, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var result T
if err := json.Unmarshal(data, &result); err != nil {
return nil, err
}
return &result, nil
}
func loadRouteOrder(path string) (map[string][]string, error) {
data, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var rof RouteOrderFile
if err := json.Unmarshal(data, &rof); err != nil {
return nil, err
}
routes := make(map[string][]string)
for k, v := range rof.Routes {
routes[k] = v
}
for alias, target := range rof.Aliases {
routes[alias] = routes[target]
}
return routes, nil
}
func writeJSON(path string, data interface{}) {
content, err := json.MarshalIndent(data, "", " ")
if err != nil {
fmt.Fprintf(os.Stderr, "Error marshaling JSON for %s: %v\n", path, err)
os.Exit(1)
}
content = append(content, '\n')
if err := os.WriteFile(path, content, 0o644); err != nil {
fmt.Fprintf(os.Stderr, "Error writing %s: %v\n", path, err)
os.Exit(1)
}
fmt.Printf(" -> %s\n", path)
}

View File

@@ -0,0 +1,46 @@
package main
// Output JSON structs — identical schema to current Python output.

// GameOutput is one row of games.json.
type GameOutput struct {
	Name string `json:"name"`
	Slug string `json:"slug"`
	Generation int `json:"generation"`
	Region string `json:"region"`
	ReleaseYear int `json:"release_year"`
	Color *string `json:"color"` // nil encodes as JSON null
}

// PokemonOutput is one row of pokemon.json (base Pokemon or form).
type PokemonOutput struct {
	PokeAPIID int `json:"pokeapi_id"`
	NationalDex int `json:"national_dex"` // forms share their species' dex number
	Name string `json:"name"`
	Types []string `json:"types"`
	SpriteURL string `json:"sprite_url"`
}

// EvolutionOutput is one flattened evolution pair in evolutions.json.
// The pointer fields are optional and encode as null when unset.
type EvolutionOutput struct {
	FromPokeAPIID int `json:"from_pokeapi_id"`
	ToPokeAPIID int `json:"to_pokeapi_id"`
	Trigger string `json:"trigger"`
	MinLevel *int `json:"min_level"`
	Item *string `json:"item"`
	HeldItem *string `json:"held_item"`
	Condition *string `json:"condition"`
}

// RouteOutput is one route (possibly with child sub-areas) in a
// per-version JSON file. Order is the sequential progression index.
type RouteOutput struct {
	Name string `json:"name"`
	Order int `json:"order"`
	Encounters []EncounterOutput `json:"encounters"`
	Children []RouteOutput `json:"children,omitempty"`
}

// EncounterOutput is one aggregated wild-encounter slot on a route.
type EncounterOutput struct {
	PokeAPIID int `json:"pokeapi_id"`
	PokemonName string `json:"pokemon_name"`
	Method string `json:"method"`
	EncounterRate int `json:"encounter_rate"` // percent, capped at 100 after aggregation
	MinLevel int `json:"min_level"`
	MaxLevel int `json:"max_level"`
}

View File

@@ -0,0 +1,200 @@
package main
import (
"context"
"encoding/json"
"fmt"
"sort"
"sync"
)
// fetchAllSpecies fetches all pokemon-species objects.
// Returns a map of species_id -> SpeciesResp. Entries with IDs >= 10000
// are filtered out of the listing. Individual species are fetched
// concurrently; the client's semaphore bounds real parallelism.
func fetchAllSpecies(ctx context.Context, client *Client) (map[int]*SpeciesResp, error) {
	// Fetch the species list.
	listData, err := client.Get(ctx, "pokemon-species?limit=10000")
	if err != nil {
		return nil, fmt.Errorf("fetching species list: %w", err)
	}
	var listing SpeciesListResp
	if err := json.Unmarshal(listData, &listing); err != nil {
		return nil, fmt.Errorf("parsing species list: %w", err)
	}
	// Filter to IDs < 10000 and sort for a deterministic fetch order.
	var speciesIDs []int
	for _, entry := range listing.Results {
		if id := entry.ID(); id < 10000 {
			speciesIDs = append(speciesIDs, id)
		}
	}
	sort.Ints(speciesIDs)
	fmt.Printf("\n--- Fetching %d species data ---\n", len(speciesIDs))
	speciesData := make(map[int]*SpeciesResp, len(speciesIDs))
	var mu sync.Mutex
	var wg sync.WaitGroup
	// One error slot per goroutine: index-addressed writes need no lock.
	errs := make([]error, len(speciesIDs))
	for i, sid := range speciesIDs {
		wg.Add(1)
		go func(i, sid int) {
			defer wg.Done()
			data, err := client.Get(ctx, fmt.Sprintf("pokemon-species/%d", sid))
			if err != nil {
				errs[i] = err
				return
			}
			var species SpeciesResp
			if err := json.Unmarshal(data, &species); err != nil {
				errs[i] = fmt.Errorf("parsing species %d: %w", sid, err)
				return
			}
			// The shared result map needs a mutex; errs does not.
			mu.Lock()
			speciesData[sid] = &species
			mu.Unlock()
		}(i, sid)
	}
	wg.Wait()
	// Report the first (lowest species index) error, if any.
	for _, err := range errs {
		if err != nil {
			return nil, err
		}
	}
	fmt.Printf(" Fetched %d/%d species\n", len(speciesData), len(speciesIDs))
	return speciesData, nil
}
// fetchAllPokemon fetches all Pokemon (base + forms) and returns output
// sorted by (national dex, PokeAPI ID).
//
// Base Pokemon and alternate forms are discovered from species varieties;
// form IDs seen in encounter data (allPokeAPIIDs entries >= 10000) but
// absent from the varieties are fetched as well. Forms take their display
// name from FormatFormName and share their species' national dex number.
func fetchAllPokemon(
	ctx context.Context,
	client *Client,
	speciesData map[int]*SpeciesResp,
	allPokeAPIIDs map[int]bool,
) ([]PokemonOutput, error) {
	// Collect base species IDs and form IDs from species varieties.
	var baseIDs []int
	var formIDs []int
	formIDSet := make(map[int]bool)
	for _, species := range speciesData {
		for _, variety := range species.Varieties {
			pid := variety.Pokemon.ID()
			if variety.IsDefault {
				baseIDs = append(baseIDs, pid)
			} else {
				formIDs = append(formIDs, pid)
				formIDSet[pid] = true
			}
		}
	}
	// Also include form IDs from encounter data not in varieties.
	for id := range allPokeAPIIDs {
		if id >= 10000 && !formIDSet[id] {
			formIDs = append(formIDs, id)
		}
	}
	sort.Ints(baseIDs)
	sort.Ints(formIDs)
	fmt.Printf("\n--- Fetching %d base Pokemon + %d forms ---\n", len(baseIDs), len(formIDs))
	// Flatten into one work list; isFormFlag[i] tells each worker how to
	// derive the display name and national dex number.
	allIDs := make([]int, 0, len(baseIDs)+len(formIDs))
	isFormFlag := make([]bool, 0, len(baseIDs)+len(formIDs))
	for _, id := range baseIDs {
		allIDs = append(allIDs, id)
		isFormFlag = append(isFormFlag, false)
	}
	for _, id := range formIDs {
		allIDs = append(allIDs, id)
		isFormFlag = append(isFormFlag, true)
	}
	// Workers write only their own index, so results/errs need no mutex.
	// (The intermediate pokemonResult wrapper with its never-read isForm
	// field is gone; outputs are stored directly.)
	results := make([]PokemonOutput, len(allIDs))
	var wg sync.WaitGroup
	errs := make([]error, len(allIDs))
	for i, pid := range allIDs {
		wg.Add(1)
		go func(i, pid int, isForm bool) {
			defer wg.Done()
			data, err := client.Get(ctx, fmt.Sprintf("pokemon/%d", pid))
			if err != nil {
				errs[i] = err
				return
			}
			var poke PokemonResp
			if err := json.Unmarshal(data, &poke); err != nil {
				errs[i] = fmt.Errorf("parsing pokemon %d: %w", pid, err)
				return
			}
			var types []string
			for _, t := range poke.Types {
				types = append(types, t.Type.Name)
			}
			var name string
			var nationalDex int
			if isForm {
				// Forms display as "Species (Form)" and share the
				// species' national dex number.
				name = FormatFormName(poke.Name, poke.Species.Name)
				nationalDex = poke.Species.ID()
			} else {
				name = toTitleCase(poke.Name)
				nationalDex = pid
			}
			results[i] = PokemonOutput{
				PokeAPIID:   pid,
				NationalDex: nationalDex,
				Name:        name,
				Types:       types,
				SpriteURL:   fmt.Sprintf("https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/%d.png", pid),
			}
		}(i, pid, isFormFlag[i])
	}
	wg.Wait()
	for _, err := range errs {
		if err != nil {
			return nil, err
		}
	}
	pokemonList := make([]PokemonOutput, len(results))
	copy(pokemonList, results)
	sort.Slice(pokemonList, func(i, j int) bool {
		if pokemonList[i].NationalDex != pokemonList[j].NationalDex {
			return pokemonList[i].NationalDex < pokemonList[j].NationalDex
		}
		return pokemonList[i].PokeAPIID < pokemonList[j].PokeAPIID
	})
	fmt.Printf(" Fetched %d base Pokemon\n", len(baseIDs))
	fmt.Printf(" Fetched %d forms\n", len(formIDs))
	return pokemonList, nil
}

View File

@@ -0,0 +1,469 @@
package main
import (
"context"
"encoding/json"
"fmt"
"sort"
"strings"
"sync"
)
// includedMethods is the allowlist of PokeAPI encounter methods that are
// kept when building route data; getEncountersForArea skips any encounter
// detail whose method is not listed here.
var includedMethods = map[string]bool{
	"walk": true,
	"surf": true,
	"old-rod": true,
	"good-rod": true,
	"super-rod": true,
	"rock-smash": true,
	"headbutt": true,
}
// processVersion processes all locations for a game version and returns the route list.
//
// Pipeline: fetch the version group's region(s) -> fetch every location
// concurrently -> fetch every location-area's encounters concurrently ->
// group encounters per location (area-specific vs whole-location) -> build
// RouteOutput entries (a location with multiple distinct areas becomes a
// parent with child routes) -> merge curated special encounters -> sort by
// curated progression -> assign sequential Order values. Every encounter's
// PokeAPI ID is also fed into pokeIDCollector for later Pokemon fetching.
func processVersion(
	ctx context.Context,
	client *Client,
	versionName string,
	vgInfo VersionGroupInfo,
	vgKey string,
	routeOrder map[string][]string,
	specialEnc *SpecialEncountersFile,
	pokeIDCollector *PokeIDCollector,
) ([]RouteOutput, error) {
	fmt.Printf("\n--- Processing %s ---\n", versionName)
	// Fetch the primary region for this version group.
	regionData, err := client.Get(ctx, fmt.Sprintf("region/%d", vgInfo.RegionID))
	if err != nil {
		return nil, fmt.Errorf("fetching region %d: %w", vgInfo.RegionID, err)
	}
	var region RegionResp
	if err := json.Unmarshal(regionData, &region); err != nil {
		return nil, fmt.Errorf("parsing region %d: %w", vgInfo.RegionID, err)
	}
	locationRefs := make([]NamedRef, len(region.Locations))
	copy(locationRefs, region.Locations)
	// Include extra regions (e.g. groups spanning more than one region).
	for _, extraRegionID := range vgInfo.ExtraRegions {
		extraData, err := client.Get(ctx, fmt.Sprintf("region/%d", extraRegionID))
		if err != nil {
			return nil, fmt.Errorf("fetching extra region %d: %w", extraRegionID, err)
		}
		var extraRegion RegionResp
		if err := json.Unmarshal(extraData, &extraRegion); err != nil {
			return nil, fmt.Errorf("parsing extra region %d: %w", extraRegionID, err)
		}
		locationRefs = append(locationRefs, extraRegion.Locations...)
	}
	fmt.Printf(" Found %d locations\n", len(locationRefs))
	// Fetch all locations concurrently. Each goroutine writes only its own
	// index of locResults/errs, so no locking is needed.
	type locationResult struct {
		locName string
		locID int
		areas []NamedRef
	}
	locResults := make([]locationResult, len(locationRefs))
	var wg sync.WaitGroup
	errs := make([]error, len(locationRefs))
	for i, locRef := range locationRefs {
		wg.Add(1)
		go func(i int, locRef NamedRef) {
			defer wg.Done()
			locData, err := client.Get(ctx, fmt.Sprintf("location/%d", locRef.ID()))
			if err != nil {
				errs[i] = err
				return
			}
			var loc LocationResp
			if err := json.Unmarshal(locData, &loc); err != nil {
				errs[i] = err
				return
			}
			locResults[i] = locationResult{
				locName: locRef.Name,
				locID: locRef.ID(),
				areas: loc.Areas,
			}
		}(i, locRef)
	}
	wg.Wait()
	for _, err := range errs {
		if err != nil {
			return nil, err
		}
	}
	// Flatten (location, area) pairs into one job list, then fetch all area
	// encounters concurrently (same index-slot pattern as above).
	type areaWork struct {
		locIdx int
		areaRef NamedRef
		locName string
		areaCount int // total areas for this location
	}
	var areaJobs []areaWork
	for i, lr := range locResults {
		for _, areaRef := range lr.areas {
			areaJobs = append(areaJobs, areaWork{
				locIdx: i,
				areaRef: areaRef,
				locName: lr.locName,
				areaCount: len(lr.areas),
			})
		}
	}
	type areaResult struct {
		locIdx int
		areaSuffix string
		areaCount int
		encounters []EncounterOutput
	}
	areaResults := make([]areaResult, len(areaJobs))
	areaErrs := make([]error, len(areaJobs))
	for i, job := range areaJobs {
		wg.Add(1)
		go func(i int, job areaWork) {
			defer wg.Done()
			encs, err := getEncountersForArea(ctx, client, job.areaRef.ID(), versionName)
			if err != nil {
				areaErrs[i] = err
				return
			}
			// areaSuffix is empty for a location's default/unnamed area.
			areaSuffix := CleanAreaName(job.areaRef.Name, job.locName)
			areaResults[i] = areaResult{
				locIdx: job.locIdx,
				areaSuffix: areaSuffix,
				areaCount: job.areaCount,
				encounters: encs,
			}
		}(i, job)
	}
	wg.Wait()
	for _, err := range areaErrs {
		if err != nil {
			return nil, err
		}
	}
	// Group area results by location: named areas (when a location has more
	// than one area) stay separate; default areas pool into allEncounters.
	type locAreaData struct {
		allEncounters []EncounterOutput
		areaSpecific map[string][]EncounterOutput
	}
	locAreas := make(map[int]*locAreaData)
	for _, ar := range areaResults {
		ld, ok := locAreas[ar.locIdx]
		if !ok {
			ld = &locAreaData{areaSpecific: make(map[string][]EncounterOutput)}
			locAreas[ar.locIdx] = ld
		}
		if len(ar.encounters) == 0 {
			continue
		}
		if ar.areaSuffix != "" && ar.areaCount > 1 {
			ld.areaSpecific[ar.areaSuffix] = append(ld.areaSpecific[ar.areaSuffix], ar.encounters...)
		} else {
			ld.allEncounters = append(ld.allEncounters, ar.encounters...)
		}
	}
	// Build routes in location order. Order fields are assigned later.
	var routes []RouteOutput
	for i, lr := range locResults {
		if len(lr.areas) == 0 {
			continue
		}
		displayName := CleanLocationName(lr.locName)
		ld, ok := locAreas[i]
		if !ok {
			continue
		}
		// Multiple area-specific encounters -> parent with children.
		// NOTE(review): ranging over ld.areaSpecific yields children in
		// random map order, so child ordering can vary between runs —
		// confirm whether deterministic output matters here.
		if len(ld.areaSpecific) > 1 {
			var childRoutes []RouteOutput
			for areaSuffix, areaEncs := range ld.areaSpecific {
				aggregated := aggregateEncounters(areaEncs)
				if len(aggregated) > 0 {
					routeName := fmt.Sprintf("%s (%s)", displayName, areaSuffix)
					for _, enc := range aggregated {
						pokeIDCollector.Add(enc.PokeAPIID)
					}
					childRoutes = append(childRoutes, RouteOutput{
						Name: routeName,
						Order: 0,
						Encounters: aggregated,
					})
				}
			}
			if len(childRoutes) > 0 {
				routes = append(routes, RouteOutput{
					Name: displayName,
					Order: 0,
					Encounters: []EncounterOutput{},
					Children: childRoutes,
				})
			}
		} else if len(ld.areaSpecific) == 1 {
			// Single area-specific -> flat route named "Location (Area)".
			for areaSuffix, areaEncs := range ld.areaSpecific {
				aggregated := aggregateEncounters(areaEncs)
				if len(aggregated) > 0 {
					routeName := fmt.Sprintf("%s (%s)", displayName, areaSuffix)
					for _, enc := range aggregated {
						pokeIDCollector.Add(enc.PokeAPIID)
					}
					routes = append(routes, RouteOutput{
						Name: routeName,
						Order: 0,
						Encounters: aggregated,
					})
				}
			}
		}
		// Non-area-specific encounters become a flat route under the
		// location's display name.
		if len(ld.allEncounters) > 0 {
			aggregated := aggregateEncounters(ld.allEncounters)
			if len(aggregated) > 0 {
				for _, enc := range aggregated {
					pokeIDCollector.Add(enc.PokeAPIID)
				}
				routes = append(routes, RouteOutput{
					Name: displayName,
					Order: 0,
					Encounters: aggregated,
				})
			}
		}
	}
	// Merge curated special encounters (gifts, statics, etc. from config).
	specialData := getSpecialEncounters(specialEnc, vgKey)
	if specialData != nil {
		routes = mergeSpecialEncounters(routes, specialData, pokeIDCollector)
	}
	// Sort by game progression using the curated route_order list.
	routes = sortRoutesByProgression(routes, vgKey, routeOrder)
	// Assign sequential order values (children directly after their parent).
	order := 1
	for i := range routes {
		routes[i].Order = order
		order++
		for j := range routes[i].Children {
			routes[i].Children[j].Order = order
			order++
		}
	}
	// Stats for the console summary.
	totalRoutes := 0
	totalEnc := 0
	for _, r := range routes {
		totalRoutes += 1 + len(r.Children)
		totalEnc += len(r.Encounters)
		for _, c := range r.Children {
			totalEnc += len(c.Encounters)
		}
	}
	fmt.Printf(" Routes with encounters: %d\n", totalRoutes)
	fmt.Printf(" Total encounter entries: %d\n", totalEnc)
	return routes, nil
}
// getEncountersForArea fetches encounter data for a location area, filtered by version.
// Only encounter details for the requested version whose method appears in
// includedMethods are returned.
func getEncountersForArea(
	ctx context.Context,
	client *Client,
	areaID int,
	versionName string,
) ([]EncounterOutput, error) {
	raw, err := client.Get(ctx, fmt.Sprintf("location-area/%d", areaID))
	if err != nil {
		return nil, err
	}
	var area LocationAreaResp
	if err := json.Unmarshal(raw, &area); err != nil {
		return nil, err
	}
	var out []EncounterOutput
	for _, pokeEnc := range area.PokemonEncounters {
		for _, versionDetail := range pokeEnc.VersionDetails {
			// Only keep data for the version currently being processed.
			if versionDetail.Version.Name != versionName {
				continue
			}
			for _, detail := range versionDetail.EncounterDetails {
				// Skip methods outside the allowlist (gifts, events, ...).
				if !includedMethods[detail.Method.Name] {
					continue
				}
				out = append(out, EncounterOutput{
					PokeAPIID:     pokeEnc.Pokemon.ID(),
					PokemonName:   pokeEnc.Pokemon.Name,
					Method:        detail.Method.Name,
					EncounterRate: detail.Chance,
					MinLevel:      detail.MinLevel,
					MaxLevel:      detail.MaxLevel,
				})
			}
		}
	}
	return out, nil
}
// aggregateEncounters groups encounters by (pokeapi_id, method), summing
// rates (capped at 100) and widening the level range across duplicates.
// Output is sorted by descending rate, then Pokemon name; a stable sort
// preserves first-seen order for full ties so output is deterministic.
func aggregateEncounters(raw []EncounterOutput) []EncounterOutput {
	type key struct {
		id     int
		method string
	}
	agg := make(map[key]*EncounterOutput)
	var order []key // preserve insertion order for deterministic tie-breaks
	for _, enc := range raw {
		k := key{enc.PokeAPIID, enc.Method}
		existing, ok := agg[k]
		if !ok {
			e := enc // copy so the map owns its own value
			agg[k] = &e
			order = append(order, k)
			continue
		}
		existing.EncounterRate += enc.EncounterRate
		if enc.MinLevel < existing.MinLevel {
			existing.MinLevel = enc.MinLevel
		}
		if enc.MaxLevel > existing.MaxLevel {
			existing.MaxLevel = enc.MaxLevel
		}
	}
	result := make([]EncounterOutput, 0, len(agg))
	for _, k := range order {
		e := agg[k]
		if e.EncounterRate > 100 {
			// Summed slot rates can exceed 100%; clamp for sanity.
			e.EncounterRate = 100
		}
		result = append(result, *e)
	}
	// SliceStable (not Slice): the unstable sort destroyed the insertion
	// order built above for (rate, name) ties, making output order vary
	// with sort internals. Stable sort keeps ties in first-seen order.
	sort.SliceStable(result, func(i, j int) bool {
		if result[i].EncounterRate != result[j].EncounterRate {
			return result[i].EncounterRate > result[j].EncounterRate
		}
		return result[i].PokemonName < result[j].PokemonName
	})
	return result
}
// mergeSpecialEncounters merges special encounters into existing routes or creates new ones.
//
// Routes are addressed by slice index rather than by *RouteOutput: the
// original built a map of pointers into routes and then appended to the
// slice, which can reallocate the backing array and leave stale pointers —
// writes through them were silently lost. Indices stay valid across
// appends. Locations are also visited in sorted order so newly created
// routes are appended deterministically.
func mergeSpecialEncounters(
	routes []RouteOutput,
	specialData map[string][]EncounterOutput,
	pokeIDCollector *PokeIDCollector,
) []RouteOutput {
	// Index top-level routes and child routes by name.
	type childPos struct{ parent, child int }
	topIdx := make(map[string]int, len(routes))
	childIdx := make(map[string]childPos)
	for i := range routes {
		topIdx[routes[i].Name] = i
		for j := range routes[i].Children {
			childIdx[routes[i].Children[j].Name] = childPos{parent: i, child: j}
		}
	}
	// Deterministic iteration over the config map.
	locationNames := make([]string, 0, len(specialData))
	for name := range specialData {
		locationNames = append(locationNames, name)
	}
	sort.Strings(locationNames)
	for _, locationName := range locationNames {
		encounters := specialData[locationName]
		for _, enc := range encounters {
			pokeIDCollector.Add(enc.PokeAPIID)
		}
		if i, ok := topIdx[locationName]; ok {
			routes[i].Encounters = append(routes[i].Encounters, encounters...)
		} else if pos, ok := childIdx[locationName]; ok {
			child := &routes[pos.parent].Children[pos.child]
			child.Encounters = append(child.Encounters, encounters...)
		} else {
			// Unknown location: create a fresh route for it.
			routes = append(routes, RouteOutput{
				Name:       locationName,
				Order:      0,
				Encounters: encounters,
			})
			topIdx[locationName] = len(routes) - 1
		}
	}
	return routes
}
// sortRoutesByProgression sorts routes by game progression order.
// Routes absent from the curated list sort after all listed ones; ties are
// broken alphabetically. When no curated list exists for vgKey the input
// order is kept as-is.
func sortRoutesByProgression(routes []RouteOutput, vgKey string, routeOrder map[string][]string) []RouteOutput {
	orderList, found := routeOrder[vgKey]
	if !found {
		return routes
	}
	less := func(i, j int) bool {
		a := routeSortKey(routes[i].Name, orderList)
		b := routeSortKey(routes[j].Name, orderList)
		if a.pos == b.pos {
			return a.name < b.name
		}
		return a.pos < b.pos
	}
	sort.SliceStable(routes, less)
	return routes
}
// sortKey orders a route by its index in the curated progression list,
// with the route name as the tie-breaker.
type sortKey struct {
	pos  int
	name string
}

// routeSortKey resolves a route name to its progression position. A route
// matches an ordered entry either exactly or as "<entry> (<suffix>)";
// routes matching nothing sort after every listed entry.
func routeSortKey(name string, orderList []string) sortKey {
	for pos, candidate := range orderList {
		if name == candidate || strings.HasPrefix(name, candidate+" (") {
			return sortKey{pos: pos, name: name}
		}
	}
	return sortKey{pos: len(orderList), name: name}
}
// PokeIDCollector is a thread-safe collector for PokeAPI IDs encountered during processing.
type PokeIDCollector struct {
	mu  sync.Mutex
	ids map[int]bool
}

// NewPokeIDCollector returns an empty, ready-to-use collector.
func NewPokeIDCollector() *PokeIDCollector {
	return &PokeIDCollector{ids: map[int]bool{}}
}

// Add records a single PokeAPI ID; duplicates collapse to one entry.
func (c *PokeIDCollector) Add(id int) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.ids[id] = true
}

// IDs returns a snapshot copy of all collected IDs; mutating the returned
// map does not affect the collector.
func (c *PokeIDCollector) IDs() map[int]bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	snapshot := make(map[int]bool, len(c.ids))
	for id := range c.ids {
		snapshot[id] = true
	}
	return snapshot
}

114
tools/fetch-pokeapi/text.go Normal file
View File

@@ -0,0 +1,114 @@
package main
import (
"fmt"
"strings"
)
// regionPrefixes lists the region slug prefixes that CleanLocationName
// strips from PokeAPI location names (e.g. "kanto-route-1" -> "route-1").
var regionPrefixes = []string{
	"kanto-", "johto-", "hoenn-", "sinnoh-",
	"unova-", "kalos-", "alola-", "galar-",
}
// CleanLocationName converts a PokeAPI location slug to a clean display name.
// e.g. "kanto-route-1" -> "Route 1", "pallet-town" -> "Pallet Town"
func CleanLocationName(name string) string {
	// Strip at most one region prefix.
	for _, prefix := range regionPrefixes {
		if trimmed := strings.TrimPrefix(name, prefix); trimmed != name {
			name = trimmed
			break
		}
	}
	spaced := strings.ReplaceAll(name, "-", " ")
	return strings.Title(spaced) //nolint:staticcheck
}
// CleanAreaName extracts a meaningful area suffix, or empty string if it's the default area.
func CleanAreaName(areaName, locationName string) string {
	if !strings.HasPrefix(areaName, locationName) {
		// Area name is unrelated to the location: title-case it whole.
		return strings.Title(strings.ReplaceAll(areaName, "-", " ")) //nolint:staticcheck
	}
	suffix := strings.Trim(strings.TrimPrefix(areaName, locationName), "- ")
	if suffix == "" || suffix == "area" {
		// "<location>-area" is PokeAPI's default, meaningless area.
		return ""
	}
	return strings.Title(strings.ReplaceAll(suffix, "-", " ")) //nolint:staticcheck
}
// FormatFormName converts a PokeAPI pokemon form name to a display name.
// e.g. "rattata-alola" (species: "rattata") -> "Rattata (Alola)"
func FormatFormName(fullName, speciesName string) string {
	formSuffix, isForm := strings.CutPrefix(fullName, speciesName+"-")
	if !isForm {
		// Not a "<species>-<form>" name: title-case it as-is.
		return toTitleCase(fullName)
	}
	return fmt.Sprintf("%s (%s)", toTitleCase(speciesName), toTitleCase(formSuffix))
}
// toTitleCase converts a hyphenated slug to Title Case with spaces.
func toTitleCase(s string) string {
	words := strings.ReplaceAll(s, "-", " ")
	return strings.Title(words) //nolint:staticcheck
}
// CollectEvolutionConditions extracts human-readable condition strings from an EvolutionDetail.
// Conditions are appended in a fixed field order; unset (nil / zero-value)
// fields contribute nothing.
func CollectEvolutionConditions(detail EvolutionDetail) []string {
	var conditions []string
	add := func(cond string) { conditions = append(conditions, cond) }

	if detail.MinHappiness != nil {
		add(fmt.Sprintf("happiness >= %d", *detail.MinHappiness))
	}
	if detail.MinAffection != nil {
		add(fmt.Sprintf("affection >= %d", *detail.MinAffection))
	}
	if detail.MinBeauty != nil {
		add(fmt.Sprintf("beauty >= %d", *detail.MinBeauty))
	}
	if detail.TimeOfDay != "" {
		add(detail.TimeOfDay)
	}
	if detail.KnownMove != nil {
		add(fmt.Sprintf("knows %s", detail.KnownMove.Name))
	}
	if detail.KnownMoveType != nil {
		add(fmt.Sprintf("knows %s-type move", detail.KnownMoveType.Name))
	}
	if detail.Location != nil {
		add(fmt.Sprintf("at %s", detail.Location.Name))
	}
	if detail.PartySpecies != nil {
		add(fmt.Sprintf("with %s in party", detail.PartySpecies.Name))
	}
	if detail.PartyType != nil {
		add(fmt.Sprintf("with %s-type in party", detail.PartyType.Name))
	}
	if detail.Gender != nil {
		// Gender value 1 is rendered as female; any other value as male.
		switch *detail.Gender {
		case 1:
			add("female")
		default:
			add("male")
		}
	}
	if detail.NeedsOverworldRain {
		add("raining")
	}
	if detail.TurnUpsideDown {
		add("turn upside down")
	}
	if detail.TradeSpecies != nil {
		add(fmt.Sprintf("trade for %s", detail.TradeSpecies.Name))
	}
	if detail.RelativePhysicalStats != nil {
		// Only the three known values produce output; anything else is
		// skipped, matching the original map-lookup behavior.
		switch *detail.RelativePhysicalStats {
		case 1:
			add("atk > def")
		case -1:
			add("atk < def")
		case 0:
			add("atk = def")
		}
	}
	return conditions
}

View File

@@ -0,0 +1,110 @@
package main
import (
"strconv"
"strings"
)
// NamedRef is a PokeAPI named resource reference (name + URL).
type NamedRef struct {
	Name string `json:"name"`
	URL  string `json:"url"`
}

// ID extracts the trailing integer ID from the URL path.
// e.g. "https://pokeapi.co/api/v2/pokemon/25/" -> 25
// A URL without a numeric final segment yields 0 (deliberate best effort).
func (r NamedRef) ID() int {
	trimmed := strings.TrimRight(r.URL, "/")
	last := trimmed[strings.LastIndex(trimmed, "/")+1:]
	n, _ := strconv.Atoi(last) // parse failure -> 0
	return n
}
// ---- PokeAPI response structs (only fields we use) ----

// SpeciesListResp is a species list response; only the result refs are used.
type SpeciesListResp struct {
	Results []NamedRef `json:"results"`
}

// RegionResp is a region response; only its location list is used.
type RegionResp struct {
	Locations []NamedRef `json:"locations"`
}

// LocationResp is a location response; only its area list is used.
type LocationResp struct {
	Areas []NamedRef `json:"areas"`
}

// LocationAreaResp is a location-area response; only its encounter list is used.
type LocationAreaResp struct {
	PokemonEncounters []PokemonEncounter `json:"pokemon_encounters"`
}

// PokemonEncounter pairs a pokemon with its per-game-version encounter data.
type PokemonEncounter struct {
	Pokemon        NamedRef        `json:"pokemon"`
	VersionDetails []VersionDetail `json:"version_details"`
}

// VersionDetail groups the encounter slots for a single game version.
type VersionDetail struct {
	Version          NamedRef          `json:"version"`
	EncounterDetails []EncounterDetail `json:"encounter_details"`
}

// EncounterDetail describes one encounter slot: its method, chance,
// and level range.
// NOTE(review): Chance is presumably a percentage — confirm against the
// PokeAPI encounter documentation.
type EncounterDetail struct {
	Chance int `json:"chance"`
	Method NamedRef `json:"method"`
	MinLevel int `json:"min_level"`
	MaxLevel int `json:"max_level"`
}
// SpeciesResp is a pokemon-species response: identity, a reference to the
// evolution chain, and the concrete pokemon varieties of the species.
type SpeciesResp struct {
	ID int `json:"id"`
	Name string `json:"name"`
	EvolutionChain NamedRef `json:"evolution_chain"`
	Varieties []struct {
		IsDefault bool `json:"is_default"`
		Pokemon NamedRef `json:"pokemon"`
	} `json:"varieties"`
}

// PokemonResp is a pokemon response: identity, owning species, and typed slots.
// NOTE(review): slot ordering assumed to mark primary/secondary type —
// confirm against the PokeAPI pokemon documentation.
type PokemonResp struct {
	ID int `json:"id"`
	Name string `json:"name"`
	Species NamedRef `json:"species"`
	Types []struct {
		Slot int `json:"slot"`
		Type struct {
			Name string `json:"name"`
		} `json:"type"`
	} `json:"types"`
}
// EvolutionChainResp is an evolution-chain response: the root link of the tree.
type EvolutionChainResp struct {
	ID int `json:"id"`
	Chain ChainLink `json:"chain"`
}

// ChainLink is one node of an evolution tree: the species at this stage,
// the species it can evolve into, and the conditions for each evolution.
type ChainLink struct {
	Species NamedRef `json:"species"`
	EvolvesTo []ChainLink `json:"evolves_to"`
	EvolutionDetails []EvolutionDetail `json:"evolution_details"`
}

// EvolutionDetail captures one set of evolution requirements. Pointer fields
// are optional: nil means the requirement does not apply. These fields are
// rendered into human-readable strings by CollectEvolutionConditions.
type EvolutionDetail struct {
	Trigger NamedRef `json:"trigger"`
	MinLevel *int `json:"min_level"`
	Item *NamedRef `json:"item"`
	HeldItem *NamedRef `json:"held_item"`
	MinHappiness *int `json:"min_happiness"`
	MinAffection *int `json:"min_affection"`
	MinBeauty *int `json:"min_beauty"`
	TimeOfDay string `json:"time_of_day"` // empty string when unset
	KnownMove *NamedRef `json:"known_move"`
	KnownMoveType *NamedRef `json:"known_move_type"`
	Location *NamedRef `json:"location"`
	PartySpecies *NamedRef `json:"party_species"`
	PartyType *NamedRef `json:"party_type"`
	Gender *int `json:"gender"` // 1 is rendered as female by CollectEvolutionConditions
	NeedsOverworldRain bool `json:"needs_overworld_rain"`
	TurnUpsideDown bool `json:"turn_upside_down"`
	TradeSpecies *NamedRef `json:"trade_species"`
	RelativePhysicalStats *int `json:"relative_physical_stats"` // 1, -1, or 0 (atk vs def)
}