|
1 | 1 | """Catalogs extension."""
2 | 2 |
|
3 | 3 | from typing import List, Optional, Type
| 4 | +from urllib.parse import urlparse |
4 | 5 |
|
5 | 6 | import attr |
6 | | -from fastapi import APIRouter, FastAPI, HTTPException, Request |
| 7 | +from fastapi import APIRouter, FastAPI, HTTPException, Query, Request |
7 | 8 | from fastapi.responses import JSONResponse |
8 | 9 | from starlette.responses import Response |
9 | 10 |
|
@@ -122,21 +123,37 @@ def register(self, app: FastAPI, settings=None) -> None:
122 | 123 |
|
123 | 124 | app.include_router(self.router, tags=["Catalogs"]) |
124 | 125 |
|
125 | | - async def catalogs(self, request: Request) -> Catalog: |
| 126 | + async def catalogs( |
| 127 | + self, |
| 128 | + request: Request, |
| 129 | + limit: Optional[int] = Query( |
| 130 | + 10, |
| 131 | + ge=1, |
| 132 | + description=( |
| 133 | + "The maximum number of catalogs to return (page size). Defaults to 10." |
| 134 | + ), |
| 135 | + ), |
| 136 | + token: Optional[str] = Query( |
| 137 | + None, |
| 138 | + description="Pagination token for the next page of results", |
| 139 | + ), |
| 140 | + ) -> Catalog: |
126 | 141 | """Get root catalog with links to all catalogs. |
127 | 142 |
|
128 | 143 | Args: |
129 | 144 | request: Request object. |
| 145 | + limit: The maximum number of catalogs to return (page size). Defaults to 10. |
| 146 | + token: Pagination token for the next page of results. |
130 | 147 |
|
131 | 148 | Returns: |
132 | 149 | Root catalog containing child links to all catalogs in the database. |
133 | 150 | """ |
134 | 151 | base_url = str(request.base_url) |
135 | 152 |
|
136 | | - # Get all catalogs from database |
| 153 | + # Get all catalogs from database with pagination |
137 | 154 | catalogs, _, _ = await self.client.database.get_all_catalogs( |
138 | | - token=None, |
139 | | - limit=1000, # Large limit to get all catalogs |
| 155 | + token=token, |
| 156 | + limit=limit, |
140 | 157 | request=request, |
141 | 158 | sort=[{"field": "id", "direction": "asc"}], |
142 | 159 | ) |
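
For context, a minimal client-side sketch of paging through this endpoint with the new `limit`/`token` parameters. It assumes the handler is reachable at some `catalogs_url` and that the returned catalog advertises a `rel="next"` link whose href carries the next `token` in its query string; neither convention is shown in this diff, and `httpx` plus the `iter_catalog_pages` helper are used purely for illustration.

```python
from urllib.parse import parse_qs, urlparse

import httpx


def iter_catalog_pages(catalogs_url: str, limit: int = 10):
    """Yield successive pages from the paginated catalogs endpoint (hypothetical client)."""
    token = None
    while True:
        params = {"limit": limit}
        if token:
            params["token"] = token
        page = httpx.get(catalogs_url, params=params).json()
        yield page
        # Follow a rel="next" link if the server advertises one (assumed convention).
        next_link = next(
            (link for link in page.get("links", []) if link.get("rel") == "next"),
            None,
        )
        if next_link is None:
            break
        # Assume the next token is passed back as a ?token= query parameter.
        token = parse_qs(urlparse(next_link["href"]).query).get("token", [None])[0]
        if token is None:
            break
```
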
@@ -258,15 +275,26 @@ async def get_catalog_collections( |
258 | 275 | if hasattr(catalog, "links") and catalog.links: |
259 | 276 | for link in catalog.links: |
260 | 277 | if link.get("rel") in ["child", "item"]: |
261 | | - # Extract collection ID from href |
| 278 | + # Extract collection ID from href using proper URL parsing |
262 | 279 | href = link.get("href", "") |
263 | | - # Look for patterns like /collections/{id} or collections/{id} |
264 | | - if "/collections/" in href: |
265 | | - collection_id = href.split("/collections/")[-1].split("/")[ |
266 | | - 0 |
267 | | - ] |
268 | | - if collection_id and collection_id not in collection_ids: |
269 | | - collection_ids.append(collection_id) |
| 280 | + if href: |
| 281 | + try: |
| 282 | + parsed_url = urlparse(href) |
| 283 | + path = parsed_url.path |
| 284 | + # Look for patterns like /collections/{id} or collections/{id} |
| 285 | + if "/collections/" in path: |
| 286 | + # Split by /collections/ and take the last segment |
| 287 | + path_parts = path.split("/collections/") |
| 288 | + if len(path_parts) > 1: |
| 289 | + collection_id = path_parts[1].split("/")[0] |
| 290 | + if ( |
| 291 | + collection_id |
| 292 | + and collection_id not in collection_ids |
| 293 | + ): |
| 294 | + collection_ids.append(collection_id) |
| 295 | + except Exception: |
| 296 | + # If URL parsing fails, skip this link |
| 297 | + continue |
270 | 298 |
|
271 | 299 | # Fetch the collections |
272 | 300 | collections = [] |
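
To make the new href handling concrete, here is a standalone sketch of the same extraction logic with a few illustrative links. The `extract_collection_id` helper name and the example URLs are invented for this sketch and do not appear in the codebase.

```python
from typing import Optional
from urllib.parse import urlparse


def extract_collection_id(href: str) -> Optional[str]:
    """Standalone version of the link-parsing logic: pull a collection id out of an href."""
    if not href:
        return None
    path = urlparse(href).path
    if "/collections/" not in path:
        return None
    # Take the path segment that immediately follows /collections/
    collection_id = path.split("/collections/")[1].split("/")[0]
    return collection_id or None


# Illustrative hrefs (made up for this sketch):
assert extract_collection_id("https://stac.example.com/collections/sentinel-2") == "sentinel-2"
assert extract_collection_id("https://stac.example.com/collections/sentinel-2/items") == "sentinel-2"
assert extract_collection_id("https://stac.example.com/catalogs/demo") is None
```

Parsing the href with `urlparse` first means query strings and fragments never leak into the extracted id, which the old raw string split could not guarantee.
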
|