Daedalus and Talos integration test
All checks were successful
CI / backend-tests (push) Successful in 26s
CI / frontend-tests (push) Successful in 29s

This commit is contained in:
Julian Tabel
2026-03-20 16:31:19 +01:00
parent 5106e57685
commit c9d42b091f
44 changed files with 8345 additions and 31 deletions

View File

@@ -0,0 +1,187 @@
#!/usr/bin/env python3
"""Fetch moves and abilities from PokeAPI and save as seed data JSON files.
Usage:
cd backend && uv run python scripts/fetch_moves_abilities.py
This script fetches all moves and abilities from PokeAPI, extracts their names
and introduced generation, and saves them to the seed data directory.
"""
import asyncio
import json
import re
from pathlib import Path
import httpx
# Output directory for the generated seed JSON files
# (script lives in backend/scripts per the usage note, so parent.parent == backend/).
DATA_DIR = Path(__file__).parent.parent / "src" / "app" / "seeds" / "data"
# Base URL for all PokeAPI v2 requests.
POKEAPI_BASE = "https://pokeapi.co/api/v2"
# Map generation names to numbers
GEN_MAP = {
    "generation-i": 1,
    "generation-ii": 2,
    "generation-iii": 3,
    "generation-iv": 4,
    "generation-v": 5,
    "generation-vi": 6,
    "generation-vii": 7,
    "generation-viii": 8,
    "generation-ix": 9,
}
def title_case_name(name: str) -> str:
    """Convert a hyphenated PokeAPI name to a spaced, title-cased one.

    Every hyphen becomes a space and each word is capitalized:
        'thunder-punch' -> 'Thunder Punch'
        'self-destruct' -> 'Self Destruct'

    Note that hyphens are never preserved, so official hyphenated names
    (e.g. "Self-Destruct") come out spaced instead.
    """
    return " ".join(map(str.capitalize, name.split("-")))
async def fetch_all_moves(client: httpx.AsyncClient) -> list[dict]:
    """Download the full move catalogue from PokeAPI.

    Returns a list of dicts (name, introduced_gen, type) sorted by name.
    Individual fetch failures are logged and skipped rather than aborting.
    """
    print("Fetching move list...")
    index = await client.get(f"{POKEAPI_BASE}/move?limit=10000")
    index.raise_for_status()
    move_urls = [entry["url"] for entry in index.json()["results"]]
    print(f"Found {len(move_urls)} moves")

    moves: list[dict] = []
    batch_size = 50
    # Walk the URL list in fixed-size windows so we never fire thousands of
    # concurrent requests at once.
    for start in range(0, len(move_urls), batch_size):
        window = move_urls[start : start + batch_size]
        print(f"Fetching moves {start + 1}-{min(start + batch_size, len(move_urls))}...")
        responses = await asyncio.gather(
            *(client.get(url) for url in window), return_exceptions=True
        )
        for resp in responses:
            # gather(return_exceptions=True) hands back the exception object
            # in place of a response when a request fails.
            if isinstance(resp, Exception):
                print(f" Error fetching move: {resp}")
                continue
            if resp.status_code != 200:
                print(f" HTTP {resp.status_code} for {resp.url}")
                continue
            move_data = resp.json()
            gen_name = move_data["generation"]["name"]
            introduced_gen = GEN_MAP.get(gen_name)
            if introduced_gen is None:
                print(f" Unknown generation: {gen_name} for move {move_data['name']}")
                continue
            record = {
                "name": title_case_name(move_data["name"]),
                "introduced_gen": introduced_gen,
                # Not every record carries a type in the source data.
                "type": move_data["type"]["name"] if move_data.get("type") else None,
            }
            moves.append(record)
    # Deterministic ordering keeps the emitted JSON diff-friendly.
    moves.sort(key=lambda entry: entry["name"])
    return moves
async def fetch_all_abilities(client: httpx.AsyncClient) -> list[dict]:
    """Download the full ability catalogue from PokeAPI.

    Returns a list of dicts (name, introduced_gen) sorted by name.
    Individual fetch failures are logged and skipped rather than aborting.
    """
    print("Fetching ability list...")
    index = await client.get(f"{POKEAPI_BASE}/ability?limit=10000")
    index.raise_for_status()
    ability_urls = [entry["url"] for entry in index.json()["results"]]
    print(f"Found {len(ability_urls)} abilities")

    abilities: list[dict] = []
    batch_size = 50
    # Fetch in fixed-size windows to bound concurrency.
    for start in range(0, len(ability_urls), batch_size):
        window = ability_urls[start : start + batch_size]
        print(f"Fetching abilities {start + 1}-{min(start + batch_size, len(ability_urls))}...")
        responses = await asyncio.gather(
            *(client.get(url) for url in window), return_exceptions=True
        )
        for resp in responses:
            # Failed requests arrive as exception objects.
            if isinstance(resp, Exception):
                print(f" Error fetching ability: {resp}")
                continue
            if resp.status_code != 200:
                print(f" HTTP {resp.status_code} for {resp.url}")
                continue
            ability_data = resp.json()
            gen_name = ability_data["generation"]["name"]
            introduced_gen = GEN_MAP.get(gen_name)
            if introduced_gen is None:
                print(
                    f" Unknown generation: {gen_name} for ability {ability_data['name']}"
                )
                continue
            record = {
                "name": title_case_name(ability_data["name"]),
                "introduced_gen": introduced_gen,
            }
            abilities.append(record)
    # Deterministic ordering keeps the emitted JSON diff-friendly.
    abilities.sort(key=lambda entry: entry["name"])
    return abilities
async def main():
    """Fetch both datasets from PokeAPI and write the seed JSON files."""
    print("Fetching moves and abilities from PokeAPI...")
    print()
    async with httpx.AsyncClient(timeout=30.0) as client:
        moves = await fetch_all_moves(client)
        print()
        abilities = await fetch_all_abilities(client)
        print()

        # Write each dataset with a trailing newline so the files diff
        # cleanly and end POSIX-style.
        for noun, records in (("moves", moves), ("abilities", abilities)):
            out_path = DATA_DIR / f"{noun}.json"
            with open(out_path, "w") as fh:
                json.dump(records, fh, indent=2)
                fh.write("\n")
            print(f"Wrote {len(records)} {noun} to {out_path}")
        print()
        print("Done!")
# Script entry point: run the async fetch-and-write pipeline.
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,46 @@
"""add moves and abilities tables
Revision ID: j1e2f3a4b5c6
Revises: i0d1e2f3a4b5
Create Date: 2026-03-20 12:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "j1e2f3a4b5c6"
down_revision: str | Sequence[str] | None = "i0d1e2f3a4b5"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Create the moves and abilities lookup tables plus generation indexes."""
    # Create moves table
    op.create_table(
        "moves",
        sa.Column("id", sa.Integer(), primary_key=True),
        # Unique name lets the seed loader upsert by name.
        sa.Column("name", sa.String(50), nullable=False, unique=True),
        sa.Column("introduced_gen", sa.SmallInteger(), nullable=False),
        # Nullable: not every source record carries a type.
        sa.Column("type", sa.String(20), nullable=True),
    )
    # Index supports filtering moves by the generation they appeared in.
    op.create_index("ix_moves_introduced_gen", "moves", ["introduced_gen"])
    # Create abilities table
    op.create_table(
        "abilities",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(50), nullable=False, unique=True),
        sa.Column("introduced_gen", sa.SmallInteger(), nullable=False),
    )
    op.create_index("ix_abilities_introduced_gen", "abilities", ["introduced_gen"])
def downgrade() -> None:
    """Drop the abilities and moves tables (indexes first, reverse of upgrade)."""
    op.drop_index("ix_abilities_introduced_gen", "abilities")
    op.drop_table("abilities")
    op.drop_index("ix_moves_introduced_gen", "moves")
    op.drop_table("moves")

View File

@@ -0,0 +1,63 @@
"""add journal entries table
Revision ID: k2f3a4b5c6d7
Revises: j1e2f3a4b5c6
Create Date: 2026-03-20 12:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "k2f3a4b5c6d7"
down_revision: str | Sequence[str] | None = "j1e2f3a4b5c6"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Create the journal_entries table."""
    op.create_table(
        "journal_entries",
        # Server-generated UUID primary key (Postgres gen_random_uuid()).
        sa.Column(
            "id",
            sa.UUID(),
            primary_key=True,
            server_default=sa.text("gen_random_uuid()"),
        ),
        # Owning run; deleting the run deletes its journal entries.
        sa.Column(
            "run_id",
            sa.Integer(),
            sa.ForeignKey("nuzlocke_runs.id", ondelete="CASCADE"),
            nullable=False,
            index=True,
        ),
        # Optional link to a boss result; cleared (SET NULL) if the result goes away.
        sa.Column(
            "boss_result_id",
            sa.Integer(),
            sa.ForeignKey("boss_results.id", ondelete="SET NULL"),
            nullable=True,
            index=True,
        ),
        sa.Column("title", sa.String(200), nullable=False),
        sa.Column("body", sa.Text(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        # NOTE(review): onupdate is a client-side SQLAlchemy hook and emits no
        # DDL, so the database will NOT auto-refresh updated_at; application
        # code must set it on update.
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
            onupdate=sa.func.now(),
        ),
    )
def downgrade() -> None:
    """Drop the journal_entries table (FK indexes are dropped with it)."""
    op.drop_table("journal_entries")

View File

@@ -0,0 +1,151 @@
from datetime import UTC, datetime
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Response
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_session
from app.models.boss_result import BossResult
from app.models.journal_entry import JournalEntry
from app.models.nuzlocke_run import NuzlockeRun
from app.schemas.journal_entry import (
JournalEntryCreate,
JournalEntryResponse,
JournalEntryUpdate,
)
router = APIRouter()
@router.get("/{run_id}/journal", response_model=list[JournalEntryResponse])
async def list_journal_entries(
    run_id: int,
    boss_result_id: int | None = None,
    session: AsyncSession = Depends(get_session),
):
    """Return a run's journal entries, newest first.

    Optionally filters to entries attached to a single boss result.
    Raises 404 if the run does not exist.
    """
    if await session.get(NuzlockeRun, run_id) is None:
        raise HTTPException(status_code=404, detail="Run not found")

    conditions = [JournalEntry.run_id == run_id]
    if boss_result_id is not None:
        conditions.append(JournalEntry.boss_result_id == boss_result_id)

    rows = await session.execute(
        select(JournalEntry)
        .where(*conditions)
        .order_by(JournalEntry.created_at.desc())
    )
    return rows.scalars().all()
@router.post("/{run_id}/journal", response_model=JournalEntryResponse, status_code=201)
async def create_journal_entry(
    run_id: int,
    data: JournalEntryCreate,
    session: AsyncSession = Depends(get_session),
):
    """Create a journal entry on a run, optionally linked to a boss result.

    Raises 404 for a missing run or boss result, 400 if the boss result
    belongs to a different run.
    """
    if await session.get(NuzlockeRun, run_id) is None:
        raise HTTPException(status_code=404, detail="Run not found")

    # A non-null boss_result_id must reference an existing result on this run.
    if data.boss_result_id is not None:
        linked = await session.get(BossResult, data.boss_result_id)
        if linked is None:
            raise HTTPException(status_code=404, detail="Boss result not found")
        if linked.run_id != run_id:
            raise HTTPException(
                status_code=400, detail="Boss result does not belong to this run"
            )

    entry = JournalEntry(
        run_id=run_id,
        boss_result_id=data.boss_result_id,
        title=data.title,
        body=data.body,
    )
    session.add(entry)
    await session.commit()
    # Refresh to pick up the server-generated id and timestamps.
    await session.refresh(entry)
    return entry
@router.get("/{run_id}/journal/{entry_id}", response_model=JournalEntryResponse)
async def get_journal_entry(
    run_id: int,
    entry_id: UUID,
    session: AsyncSession = Depends(get_session),
):
    """Fetch one journal entry scoped to its run; 404 if not found."""
    lookup = await session.execute(
        select(JournalEntry).where(
            JournalEntry.run_id == run_id,
            JournalEntry.id == entry_id,
        )
    )
    entry = lookup.scalar_one_or_none()
    if entry is None:
        raise HTTPException(status_code=404, detail="Journal entry not found")
    return entry
@router.put("/{run_id}/journal/{entry_id}", response_model=JournalEntryResponse)
async def update_journal_entry(
    run_id: int,
    entry_id: UUID,
    data: JournalEntryUpdate,
    session: AsyncSession = Depends(get_session),
):
    """Apply a partial update to a journal entry scoped to its run.

    Only fields present in the request body are changed. Raises 404 for a
    missing entry or boss result, 400 if the boss result belongs to another
    run.
    """
    lookup = await session.execute(
        select(JournalEntry).where(
            JournalEntry.run_id == run_id,
            JournalEntry.id == entry_id,
        )
    )
    entry = lookup.scalar_one_or_none()
    if entry is None:
        raise HTTPException(status_code=404, detail="Journal entry not found")

    changes = data.model_dump(exclude_unset=True)

    # A non-null boss_result_id must reference an existing result on this run
    # (explicitly setting it to None just clears the link).
    new_boss_id = changes.get("boss_result_id")
    if new_boss_id is not None:
        linked = await session.get(BossResult, new_boss_id)
        if linked is None:
            raise HTTPException(status_code=404, detail="Boss result not found")
        if linked.run_id != run_id:
            raise HTTPException(
                status_code=400, detail="Boss result does not belong to this run"
            )

    for field, value in changes.items():
        setattr(entry, field, value)
    # updated_at is maintained application-side.
    entry.updated_at = datetime.now(UTC)

    await session.commit()
    await session.refresh(entry)
    return entry
@router.delete("/{run_id}/journal/{entry_id}", status_code=204)
async def delete_journal_entry(
    run_id: int,
    entry_id: UUID,
    session: AsyncSession = Depends(get_session),
):
    """Delete a journal entry scoped to its run; 404 if it does not exist."""
    lookup = await session.execute(
        select(JournalEntry).where(
            JournalEntry.run_id == run_id,
            JournalEntry.id == entry_id,
        )
    )
    entry = lookup.scalar_one_or_none()
    if entry is None:
        raise HTTPException(status_code=404, detail="Journal entry not found")

    await session.delete(entry)
    await session.commit()
    # Explicit empty response keeps the 204 body-free.
    return Response(status_code=204)

View File

@@ -8,6 +8,7 @@ from app.api import (
games,
genlockes,
health,
journal_entries,
pokemon,
runs,
stats,
@@ -19,6 +20,7 @@ api_router.include_router(games.router, prefix="/games", tags=["games"])
api_router.include_router(pokemon.router, tags=["pokemon"])
api_router.include_router(evolutions.router, tags=["evolutions"])
api_router.include_router(runs.router, prefix="/runs", tags=["runs"])
api_router.include_router(journal_entries.router, prefix="/runs", tags=["journal"])
api_router.include_router(genlockes.router, prefix="/genlockes", tags=["genlockes"])
api_router.include_router(encounters.router, tags=["encounters"])
api_router.include_router(stats.router, prefix="/stats", tags=["stats"])

View File

@@ -1,3 +1,4 @@
from app.models.ability import Ability
from app.models.boss_battle import BossBattle
from app.models.boss_pokemon import BossPokemon
from app.models.boss_result import BossResult
@@ -6,6 +7,8 @@ from app.models.evolution import Evolution
from app.models.game import Game
from app.models.genlocke import Genlocke, GenlockeLeg
from app.models.genlocke_transfer import GenlockeTransfer
from app.models.journal_entry import JournalEntry
from app.models.move import Move
from app.models.nuzlocke_run import NuzlockeRun
from app.models.pokemon import Pokemon
from app.models.route import Route
@@ -13,6 +16,7 @@ from app.models.route_encounter import RouteEncounter
from app.models.version_group import VersionGroup
__all__ = [
"Ability",
"BossBattle",
"BossPokemon",
"BossResult",
@@ -22,6 +26,8 @@ __all__ = [
"Genlocke",
"GenlockeLeg",
"GenlockeTransfer",
"JournalEntry",
"Move",
"NuzlockeRun",
"Pokemon",
"Route",

View File

@@ -0,0 +1,15 @@
from sqlalchemy import SmallInteger, String
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class Ability(Base):
    """A Pokemon ability and the generation it was introduced in.

    Rows are seeded from PokeAPI data and upserted by unique name.
    """

    __tablename__ = "abilities"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(primary_key=True)
    # Display name; unique so seed loading can upsert by name.
    name: Mapped[str] = mapped_column(String(50), unique=True)
    # Generation number (1-9) in which the ability first appeared.
    introduced_gen: Mapped[int] = mapped_column(SmallInteger)

    def __repr__(self) -> str:
        return f"<Ability(id={self.id}, name='{self.name}', gen={self.introduced_gen})>"

View File

@@ -0,0 +1,37 @@
from datetime import datetime
from uuid import UUID
from sqlalchemy import DateTime, ForeignKey, String, Text, func
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.core.database import Base
class JournalEntry(Base):
    """A free-form journal entry attached to a Nuzlocke run.

    Entries may optionally reference a boss result: deleting the boss result
    keeps the entry (FK is SET NULL), while deleting the run cascades and
    removes its entries.
    """

    __tablename__ = "journal_entries"

    # Server-generated UUID primary key (Postgres gen_random_uuid()).
    id: Mapped[UUID] = mapped_column(
        primary_key=True, server_default=func.gen_random_uuid()
    )
    run_id: Mapped[int] = mapped_column(
        ForeignKey("nuzlocke_runs.id", ondelete="CASCADE"), index=True
    )
    boss_result_id: Mapped[int | None] = mapped_column(
        ForeignKey("boss_results.id", ondelete="SET NULL"), index=True
    )
    title: Mapped[str] = mapped_column(String(200))
    body: Mapped[str] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    # NOTE(review): onupdate is a client-side SQLAlchemy hook, not a DB
    # trigger; the API layer also sets updated_at explicitly on update.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )

    # Quoted forward references: NuzlockeRun and BossResult are not imported
    # in this module and there is no `from __future__ import annotations`, so
    # unquoted annotations would raise NameError at import time. SQLAlchemy
    # resolves the strings through the declarative registry instead.
    run: Mapped["NuzlockeRun"] = relationship(back_populates="journal_entries")
    boss_result: Mapped["BossResult | None"] = relationship()

    def __repr__(self) -> str:
        return (
            f"<JournalEntry(id={self.id}, run_id={self.run_id}, title='{self.title}')>"
        )

View File

@@ -0,0 +1,16 @@
from sqlalchemy import SmallInteger, String
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class Move(Base):
    """A Pokemon move and the generation it was introduced in.

    Rows are seeded from PokeAPI data and upserted by unique name.
    """

    __tablename__ = "moves"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(primary_key=True)
    # Display name; unique so seed loading can upsert by name.
    name: Mapped[str] = mapped_column(String(50), unique=True)
    # Generation number (1-9) in which the move first appeared.
    introduced_gen: Mapped[int] = mapped_column(SmallInteger)
    # Move type name as provided by the seed data; None when the source
    # record carries no type.
    type: Mapped[str | None] = mapped_column(String(20))

    def __repr__(self) -> str:
        return f"<Move(id={self.id}, name='{self.name}', gen={self.introduced_gen})>"

View File

@@ -27,6 +27,7 @@ class NuzlockeRun(Base):
game: Mapped[Game] = relationship(back_populates="runs")
encounters: Mapped[list[Encounter]] = relationship(back_populates="run")
boss_results: Mapped[list[BossResult]] = relationship(back_populates="run")
journal_entries: Mapped[list[JournalEntry]] = relationship(back_populates="run")
def __repr__(self) -> str:
return (

View File

@@ -25,6 +25,17 @@ from app.schemas.game import (
RouteUpdate,
)
from app.schemas.genlocke import GenlockeCreate, GenlockeLegResponse, GenlockeResponse
from app.schemas.journal_entry import (
JournalEntryCreate,
JournalEntryResponse,
JournalEntryUpdate,
)
from app.schemas.move import (
AbilityResponse,
MoveResponse,
PaginatedAbilityResponse,
PaginatedMoveResponse,
)
from app.schemas.pokemon import (
BulkImportItem,
BulkImportResult,
@@ -46,6 +57,7 @@ from app.schemas.run import (
)
__all__ = [
"AbilityResponse",
"BossBattleCreate",
"BossBattleResponse",
"BossBattleUpdate",
@@ -68,6 +80,12 @@ __all__ = [
"GameDetailResponse",
"GameResponse",
"GameUpdate",
"JournalEntryCreate",
"JournalEntryResponse",
"JournalEntryUpdate",
"MoveResponse",
"PaginatedAbilityResponse",
"PaginatedMoveResponse",
"PokemonCreate",
"PokemonResponse",
"PokemonUpdate",

View File

@@ -0,0 +1,26 @@
from datetime import datetime
from uuid import UUID
from app.schemas.base import CamelModel
class JournalEntryCreate(CamelModel):
    """Request body for creating a journal entry on a run."""

    # Optional link to a boss result; validated against the run by the API.
    boss_result_id: int | None = None
    title: str
    body: str
class JournalEntryUpdate(CamelModel):
    """Partial-update body: only fields actually sent are applied
    (the API reads it with model_dump(exclude_unset=True))."""

    boss_result_id: int | None = None
    title: str | None = None
    body: str | None = None
class JournalEntryResponse(CamelModel):
    """Serialized journal entry returned by the journal endpoints."""

    id: UUID
    run_id: int
    # None when the entry is not linked to a boss result.
    boss_result_id: int | None
    title: str
    body: str
    created_at: datetime
    updated_at: datetime

View File

@@ -0,0 +1,28 @@
from app.schemas.base import CamelModel
class MoveResponse(CamelModel):
    """Serialized move record."""

    id: int
    name: str
    # Generation number in which the move was introduced.
    introduced_gen: int
    # None when the source record carries no type.
    type: str | None
class PaginatedMoveResponse(CamelModel):
    """One page of moves plus the paging metadata used to request it."""

    items: list[MoveResponse]
    # Total matching rows, not just this page.
    total: int
    limit: int
    offset: int
class AbilityResponse(CamelModel):
    """Serialized ability record."""

    id: int
    name: str
    # Generation number in which the ability was introduced.
    introduced_gen: int
class PaginatedAbilityResponse(CamelModel):
    """One page of abilities plus the paging metadata used to request it."""

    items: list[AbilityResponse]
    # Total matching rows, not just this page.
    total: int
    limit: int
    offset: int

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -4,11 +4,13 @@ from sqlalchemy import delete, select, update
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.ability import Ability
from app.models.boss_battle import BossBattle
from app.models.boss_pokemon import BossPokemon
from app.models.encounter import Encounter
from app.models.evolution import Evolution
from app.models.game import Game
from app.models.move import Move
from app.models.pokemon import Pokemon
from app.models.route import Route
from app.models.route_encounter import RouteEncounter
@@ -484,3 +486,59 @@ async def upsert_evolutions(
await session.flush()
return count
async def upsert_moves(
    session: AsyncSession,
    moves: list[dict],
) -> int:
    """Upsert move records keyed by unique name; return the rows written.

    Issues a single bulk INSERT ... ON CONFLICT (name) DO UPDATE instead of
    one round trip per move. Assumes names are unique within *moves* (PokeAPI
    resources are unique), since Postgres rejects updating the same row twice
    in one statement.
    """
    if not moves:
        # values([]) is invalid SQL; nothing to do.
        return 0
    stmt = insert(Move).values(
        [
            {
                "name": move["name"],
                "introduced_gen": move["introduced_gen"],
                "type": move.get("type"),
            }
            for move in moves
        ]
    )
    stmt = stmt.on_conflict_do_update(
        index_elements=["name"],
        # EXCLUDED carries the values of the row that hit the conflict.
        set_={
            "introduced_gen": stmt.excluded.introduced_gen,
            "type": stmt.excluded["type"],
        },
    )
    await session.execute(stmt)
    await session.flush()
    return len(moves)
async def upsert_abilities(
    session: AsyncSession,
    abilities: list[dict],
) -> int:
    """Upsert ability records keyed by unique name; return the rows written.

    Issues a single bulk INSERT ... ON CONFLICT (name) DO UPDATE instead of
    one round trip per ability. Assumes names are unique within *abilities*,
    since Postgres rejects updating the same row twice in one statement.
    """
    if not abilities:
        # values([]) is invalid SQL; nothing to do.
        return 0
    stmt = insert(Ability).values(
        [
            {
                "name": ability["name"],
                "introduced_gen": ability["introduced_gen"],
            }
            for ability in abilities
        ]
    )
    stmt = stmt.on_conflict_do_update(
        index_elements=["name"],
        # EXCLUDED carries the values of the row that hit the conflict.
        set_={"introduced_gen": stmt.excluded.introduced_gen},
    )
    await session.execute(stmt)
    await session.flush()
    return len(abilities)

View File

@@ -10,18 +10,22 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.core.database import async_session
from app.models.ability import Ability
from app.models.boss_battle import BossBattle
from app.models.boss_pokemon import BossPokemon
from app.models.evolution import Evolution
from app.models.game import Game
from app.models.move import Move
from app.models.pokemon import Pokemon
from app.models.route import Route
from app.models.route_encounter import RouteEncounter
from app.models.version_group import VersionGroup
from app.seeds.loader import (
upsert_abilities,
upsert_bosses,
upsert_evolutions,
upsert_games,
upsert_moves,
upsert_pokemon,
upsert_route_encounters,
upsert_routes,
@@ -69,6 +73,24 @@ async def seed(*, prune: bool = False):
dex_to_id = await upsert_pokemon(session, pokemon_data)
print(f"Pokemon: {len(dex_to_id)} upserted")
# 3a. Upsert Moves
moves_path = DATA_DIR / "moves.json"
if moves_path.exists():
moves_data = load_json("moves.json")
moves_count = await upsert_moves(session, moves_data)
print(f"Moves: {moves_count} upserted")
else:
print("No moves.json found, skipping moves")
# 3b. Upsert Abilities
abilities_path = DATA_DIR / "abilities.json"
if abilities_path.exists():
abilities_data = load_json("abilities.json")
abilities_count = await upsert_abilities(session, abilities_data)
print(f"Abilities: {abilities_count} upserted")
else:
print("No abilities.json found, skipping abilities")
# 4. Per version group: upsert routes once, then encounters per game
total_routes = 0
total_encounters = 0
@@ -199,6 +221,10 @@ async def verify():
vg_count = (await session.execute(select(func.count(VersionGroup.id)))).scalar()
games_count = (await session.execute(select(func.count(Game.id)))).scalar()
pokemon_count = (await session.execute(select(func.count(Pokemon.id)))).scalar()
moves_count = (await session.execute(select(func.count(Move.id)))).scalar()
abilities_count = (
await session.execute(select(func.count(Ability.id)))
).scalar()
routes_count = (await session.execute(select(func.count(Route.id)))).scalar()
enc_count = (
await session.execute(select(func.count(RouteEncounter.id)))
@@ -208,6 +234,8 @@ async def verify():
print(f"Version Groups: {vg_count}")
print(f"Games: {games_count}")
print(f"Pokemon: {pokemon_count}")
print(f"Moves: {moves_count}")
print(f"Abilities: {abilities_count}")
print(f"Routes: {routes_count}")
print(f"Route Encounters: {enc_count}")
print(f"Boss Battles: {boss_count}")