
Commit

Merge branch 'main' into feat/issue-95-add-text-search-for-post
osoken authored Aug 17, 2024
2 parents 2f13c99 + b32a749 commit 98294ca
Showing 3 changed files with 40 additions and 14 deletions.
8 changes: 5 additions & 3 deletions api/birdxplorer_api/routers/data.py
@@ -9,6 +9,7 @@
LanguageIdentifier,
Note,
NoteId,
PaginationMeta,
ParticipantId,
Post,
PostId,
@@ -30,6 +31,7 @@ class NoteListResponse(BaseModel):

class PostListResponse(BaseModel):
data: List[Post]
meta: PaginationMeta


def str_to_twitter_timestamp(s: str) -> TwitterTimestamp:
@@ -95,8 +97,8 @@ def get_posts(
note_id: Union[List[NoteId], None] = Query(default=None),
created_at_start: Union[None, TwitterTimestamp, str] = Query(default=None),
created_at_end: Union[None, TwitterTimestamp, str] = Query(default=None),
offset: int = Query(default=0, ge=0),  # ensure offset is non-negative
limit: int = Query(default=100, gt=0, le=1000),  # ensure limit stays within a reasonable range
offset: int = Query(default=0, ge=0),
limit: int = Query(default=100, gt=0, le=1000),
search_text: Union[None, str] = Query(default=None),
) -> PostListResponse:
posts = None
@@ -135,6 +137,6 @@ def get_posts(
if offset > 0:
prev_url = f"{base_url}?offset={prev_offset}&limit={limit}"

return PostListResponse(data=paginated_posts, meta={"next": next_url, "prev": prev_url})
return PostListResponse(data=paginated_posts, meta=PaginationMeta(next=next_url, prev=prev_url))

return router
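
The hunks above omit how `base_url`, `next_url`, and the page slice are computed. A minimal sketch of the offset/limit link-building this endpoint appears to perform — the helper name, the `total` parameter, and the exact URL format are assumptions, not code from this commit:

```python
from typing import Optional, Tuple


def build_pagination_links(
    base_url: str, offset: int, limit: int, total: int
) -> Tuple[Optional[str], Optional[str]]:
    """Illustrative only: derive next/prev URLs for offset-based pagination."""
    next_url: Optional[str] = None
    prev_url: Optional[str] = None
    if offset + limit < total:  # more items remain after this page
        next_url = f"{base_url}?offset={offset + limit}&limit={limit}"
    if offset > 0:  # there is at least one earlier page
        prev_url = f"{base_url}?offset={max(offset - limit, 0)}&limit={limit}"
    return next_url, prev_url
```

The endpoint then wraps the result as `PaginationMeta(next=next_url, prev=prev_url)`, as the diff above shows.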
39 changes: 29 additions & 10 deletions api/tests/routers/test_data.py
@@ -24,14 +24,20 @@ def test_posts_get(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(d.model_dump_json()) for d in post_samples]}
assert res_json == {
"data": [json.loads(d.model_dump_json()) for d in post_samples],
"meta": {"next": None, "prev": None},
}


def test_posts_get_limit_and_offset(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?limit=2&offset=1")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(d.model_dump_json()) for d in post_samples[1:3]]}
assert res_json == {
"data": [json.loads(d.model_dump_json()) for d in post_samples[1:3]],
"meta": {"next": None, "prev": "http://testserver/api/v1/data/posts?offset=0&limit=2"},
}


def test_posts_get_has_post_id_filter(client: TestClient, post_samples: List[Post]) -> None:
@@ -42,57 +48,70 @@ def test_posts_get_has_post_id_filter(client: TestClient, post_samples: List[Post]) -> None:
"data": [
json.loads(post_samples[0].model_dump_json()),
json.loads(post_samples[2].model_dump_json()),
]
],
"meta": {"next": None, "prev": None},
}


def test_posts_get_has_note_id_filter(client: TestClient, post_samples: List[Post], note_samples: List[Note]) -> None:
response = client.get(f"/api/v1/data/posts/?noteId={','.join([n.note_id for n in note_samples])}")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[0].model_dump_json())]}
assert res_json == {"data": [json.loads(post_samples[0].model_dump_json())], "meta": {"next": None, "prev": None}}


def test_posts_get_has_created_at_filter_start_and_end(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtStart=2006-7-25 00:00:00&createdAtEnd=2006-7-30 23:59:59")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[1].model_dump_json())]}
assert res_json == {"data": [json.loads(post_samples[1].model_dump_json())], "meta": {"next": None, "prev": None}}


def test_posts_get_has_created_at_filter_start(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtStart=2006-7-25 00:00:00")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[i].model_dump_json()) for i in (1, 2)]}
assert res_json == {
"data": [json.loads(post_samples[i].model_dump_json()) for i in (1, 2)],
"meta": {"next": None, "prev": None},
}


def test_posts_get_has_created_at_filter_end(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtEnd=2006-7-30 00:00:00")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[i].model_dump_json()) for i in (0, 1)]}
assert res_json == {
"data": [json.loads(post_samples[i].model_dump_json()) for i in (0, 1)],
"meta": {"next": None, "prev": None},
}


def test_posts_get_created_at_range_filter_accepts_integer(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtStart=1153921700000&createdAtEnd=1154921800000")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[1].model_dump_json())]}
assert res_json == {"data": [json.loads(post_samples[1].model_dump_json())], "meta": {"next": None, "prev": None}}


def test_posts_get_created_at_start_filter_accepts_integer(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtStart=1153921700000")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[i].model_dump_json()) for i in (1, 2)]}
assert res_json == {
"data": [json.loads(post_samples[i].model_dump_json()) for i in (1, 2)],
"meta": {"next": None, "prev": None},
}


def test_posts_get_created_at_end_filter_accepts_integer(client: TestClient, post_samples: List[Post]) -> None:
response = client.get("/api/v1/data/posts/?createdAtEnd=1154921800000")
assert response.status_code == 200
res_json = response.json()
assert res_json == {"data": [json.loads(post_samples[i].model_dump_json()) for i in (0, 1)]}
assert res_json == {
"data": [json.loads(post_samples[i].model_dump_json()) for i in (0, 1)],
"meta": {"next": None, "prev": None},
}


def test_posts_get_timestamp_out_of_range(client: TestClient, post_samples: List[Post]) -> None:
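
Not part of this commit, but a hypothetical companion test for the `next` link could mirror the `prev` assertion above; the fixture size (more than one sample post) and the testserver URL format are assumptions taken from the existing tests:

```python
def test_posts_get_pagination_meta_next(client: TestClient, post_samples: List[Post]) -> None:
    # Assumes post_samples holds more than one post, so limit=1 leaves a following page.
    response = client.get("/api/v1/data/posts/?limit=1")
    assert response.status_code == 200
    res_json = response.json()
    assert res_json["meta"]["next"] == "http://testserver/api/v1/data/posts?offset=1&limit=1"
    assert res_json["meta"]["prev"] is None
```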
7 changes: 6 additions & 1 deletion common/birdxplorer_common/models.py
@@ -1,7 +1,7 @@
from abc import ABC, abstractmethod
from datetime import datetime, timezone
from enum import Enum
from typing import Any, Dict, List, Literal, Type, TypeAlias, TypeVar, Union
from typing import Any, Dict, List, Literal, Optional, Type, TypeAlias, TypeVar, Union

from pydantic import BaseModel as PydanticBaseModel
from pydantic import ConfigDict, GetCoreSchemaHandler, HttpUrl, TypeAdapter
@@ -687,3 +687,8 @@ class Post(BaseModel):
like_count: NonNegativeInt
repost_count: NonNegativeInt
impression_count: NonNegativeInt


class PaginationMeta(BaseModel):
next: Optional[HttpUrl] = None
prev: Optional[HttpUrl] = None
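
A minimal usage sketch for the new model, assuming pydantic v2 behaviour and that the package is importable as `birdxplorer_common` (the import path is inferred from the file location):

```python
from birdxplorer_common.models import PaginationMeta

meta = PaginationMeta()  # both links default to None, e.g. a single-page result
assert meta.next is None and meta.prev is None

meta = PaginationMeta(next="http://testserver/api/v1/data/posts?offset=2&limit=2")
# pydantic validates and coerces the string into an HttpUrl instance
assert str(meta.next).startswith("http://testserver/")
```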
