-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
13 changed files
with
759 additions
and
10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -21,3 +21,5 @@ dev-env/ | |
/data/ | ||
tmp/ | ||
.task/ | ||
node_modules/ | ||
package-lock.json |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
from pathlib import Path | ||
from typing import Any, Iterator | ||
|
||
from ariadne import ObjectType, QueryType, gql, make_executable_schema | ||
from ariadne.asgi import GraphQL | ||
from sqlalchemy import select | ||
from starlette.applications import Starlette | ||
from starlette.routing import Mount | ||
|
||
from chii.compat import phpseralize | ||
from chii.const import CollectionType | ||
from chii.db import sa | ||
from chii.db.tables import ChiiTimeline, ChiiTimeline_column_cat, ChiiTimeline_column_id | ||
from chii.timeline import TimelineCat | ||
from gql.model import CollectTimeline | ||
from gql.rules import depth_limit_validator | ||
|
||
# Define types using Schema Definition Language (https://graphql.org/learn/schema/)
# Wrapping string in gql function provides validation and better error traceback
# The schema file sits next to this module: Path(__file__, "..") resolves to the
# module's own directory before appending "schema.graphql".
type_defs = gql(
    Path(__file__, "..", "schema.graphql").resolve().read_text(encoding="utf8")
)

# Async SQLAlchemy session factory; each resolver opens its own short-lived
# session via `async with CreateSession() as session`.
CreateSession = sa.async_session_maker()

# Map resolver functions to Query fields using QueryType
gql_query = QueryType()
# Resolvers are simple python functions
@gql_query.field("timeline_collection")
async def timeline_collection(*_: Any) -> list[CollectTimeline]:
    """Resolve the 10 most recent subject-collection timeline entries.

    Reads the newest ``ChiiTimeline`` rows in the Subject category and
    converts each row's PHP-serialized ``memo`` payload into a
    ``CollectTimeline`` dict.
    """
    async with CreateSession() as session:
        query = (
            select(ChiiTimeline)
            .where(ChiiTimeline_column_cat == TimelineCat.Subject)
            .order_by(ChiiTimeline_column_id.desc())
            .limit(10)
        )
        records: Iterator[ChiiTimeline] = await session.scalars(query)

        timeline: list[CollectTimeline] = []
        for record in records:
            memo = phpseralize.loads(record.memo.encode())
            # Batch rows serialize several subject ids as the mapping's keys;
            # single rows store one id under the "subject_id" key.
            if record.batch:
                subject_ids = [int(key) for key in memo]
            else:
                subject_ids = [int(memo["subject_id"])]
            timeline.append(
                CollectTimeline(
                    id=record.id,
                    action=CollectionType.wish,
                    user_id=record.uid,
                    subject_id=subject_ids,
                    created_at=record.created_at,
                )
            )

        return timeline
|
||
|
||
# Map resolver functions to custom type fields using ObjectType
gql_collect_timeline = ObjectType("CollectTimeline")

# Create executable GraphQL schema
schema = make_executable_schema(type_defs, gql_query, gql_collect_timeline)

# NOTE(review): debug=True on both Starlette and GraphQL exposes tracebacks and
# the GraphQL playground — confirm this is not the production configuration.
app = Starlette(
    debug=True,
    routes=[
        Mount(
            "/graphql",
            GraphQL(
                schema,
                debug=True,
                # Reject queries nested deeper than 5 levels to bound the
                # work a single request can demand.
                validation_rules=[depth_limit_validator(max_depth=5)],
            ),
        ),
    ],
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
from typing import TypedDict | ||
|
||
|
||
# Shape of one GraphQL "CollectTimeline" payload; a plain dict at runtime.
CollectTimeline = TypedDict(
    "CollectTimeline",
    {
        "id": int,
        "action": int,
        "user_id": int,
        "subject_id": list[int],
        # unix timestamp in seconds
        "created_at": int,
    },
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,120 @@ | ||
from typing import Dict, Tuple | ||
|
||
from ariadne.contrib.tracing.utils import is_introspection_key | ||
from graphql import ( | ||
ASTValidationRule, | ||
DefinitionNode, | ||
FieldNode, | ||
FragmentDefinitionNode, | ||
FragmentSpreadNode, | ||
GraphQLError, | ||
InlineFragmentNode, | ||
Node, | ||
OperationDefinitionNode, | ||
ValidationContext, | ||
) | ||
from graphql.validation.validate import ValidationAbortedError | ||
|
||
|
||
def depth_limit_validator(max_depth: int):
    """Build an AST validation rule rejecting operations deeper than *max_depth*.

    The returned class measures every operation in the document (expanding
    fragment spreads) during __init__; ``determine_depth`` reports an error
    and aborts validation if any operation exceeds the limit.
    """

    class DepthLimitValidator(ASTValidationRule):
        def __init__(self, validation_context: ValidationContext):
            definitions = validation_context.document.definitions
            fragments = get_fragments(definitions)
            operations = get_queries_and_mutations(definitions)

            # Measuring each operation triggers the error/abort side effect
            # of determine_depth; the depth values themselves are not needed.
            for op_name, op_node in operations.items():
                determine_depth(
                    node=op_node,
                    fragments=fragments,
                    depth_so_far=0,
                    max_depth=max_depth,
                    context=validation_context,
                    operation_name=op_name,
                )
            super().__init__(validation_context)

    return DepthLimitValidator
|
||
|
||
def get_fragments(
    definitions: Tuple[DefinitionNode, ...],
) -> Dict[str, FragmentDefinitionNode]:
    """Index every fragment definition in the document by its fragment name."""
    return {
        definition.name.value: definition
        for definition in definitions
        if isinstance(definition, FragmentDefinitionNode)
    }
|
||
|
||
def get_queries_and_mutations(
    definitions: Tuple[DefinitionNode, ...],
) -> Dict[str, OperationDefinitionNode]:
    """Map operation name -> operation node; unnamed operations key "anonymous"."""
    operations: Dict[str, OperationDefinitionNode] = {}
    for definition in definitions:
        if not isinstance(definition, OperationDefinitionNode):
            continue
        key = definition.name.value if definition.name else "anonymous"
        operations[key] = definition
    return operations
|
||
|
||
def determine_depth(
    node: Node,
    fragments: Dict[str, FragmentDefinitionNode],
    depth_so_far: int,
    max_depth: int,
    context: ValidationContext,
    operation_name: str,
) -> int:
    """Recursively compute the selection depth of *node*, expanding fragments.

    Reports a GraphQLError on *context* and raises ValidationAbortedError as
    soon as the running depth exceeds *max_depth*, so validation stops at the
    first violation rather than measuring the whole document.
    """
    if depth_so_far > max_depth:
        context.report_error(
            GraphQLError(
                f"'{operation_name}' exceeds maximum operation depth of {max_depth}.",
                [node],
            )
        )
        # Abort the whole validation pass; the reported error above is the result.
        raise ValidationAbortedError
    if isinstance(node, FieldNode):
        # Introspection fields (e.g. __schema, __typename) don't count toward depth.
        should_ignore = is_introspection_key(node.name.value)

        if should_ignore or not node.selection_set:
            return 0
        # A field with a selection set contributes one level plus its deepest child.
        return 1 + max(
            determine_depth(
                node=selection,
                fragments=fragments,
                depth_so_far=depth_so_far + 1,
                max_depth=max_depth,
                context=context,
                operation_name=operation_name,
            )
            for selection in node.selection_set.selections
        )
    if isinstance(node, FragmentSpreadNode):
        # A spread adds no depth itself; measure the fragment definition it names.
        return determine_depth(
            node=fragments[node.name.value],
            fragments=fragments,
            depth_so_far=depth_so_far,
            max_depth=max_depth,
            context=context,
            operation_name=operation_name,
        )
    if isinstance(
        node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode)
    ):
        # These wrap a selection set without adding a level of their own.
        return max(
            determine_depth(
                node=selection,
                fragments=fragments,
                depth_so_far=depth_so_far,
                max_depth=max_depth,
                context=context,
                operation_name=operation_name,
            )
            for selection in node.selection_set.selections
        )
    raise Exception(f"Depth crawler cannot handle: {node.kind}.")  # pragma: no cover
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
# Root query type: the only entry point exposed by this service.
type Query {
  # The 10 most recent subject-collection timeline entries.
  timeline_collection:[CollectTimeline!]!
}

# One row of the collection timeline.
type CollectTimeline {
  id: Int!
  action: Int!
  user_id: Int!
  subject_id: [Int!]!
  # unix timestamp in seconds
  created_at: Int!
}
Oops, something went wrong.