diff --git a/app/api/dependencies/auth_utils.py b/app/api/dependencies/auth_utils.py index dbd4720..b035dad 100644 --- a/app/api/dependencies/auth_utils.py +++ b/app/api/dependencies/auth_utils.py @@ -14,7 +14,7 @@ # Password Hashing password_context = CryptContext(schemes=["bcrypt"], deprecated="auto") # oauth2 scheme -oauth2_scheme = OAuth2PasswordBearer(tokenUrl='api/u/login', scheme_name="JWT") +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/u/login", scheme_name="JWT") def get_hashed_password(password: str) -> str: @@ -25,31 +25,49 @@ def verify_password(password: str, hashed_password: str) -> bool: return password_context.verify(password, hashed_password) -def create_access_token(subject: Dict[str, Any], expires_delta: timedelta = None) -> str: +def create_access_token( + subject: Dict[str, Any], expires_delta: timedelta = None +) -> str: if expires_delta is not None: expires_delta = datetime.utcnow() + expires_delta else: - expires_delta = datetime.utcnow() + timedelta(minutes=settings.access_token_expire_min) + expires_delta = datetime.utcnow() + timedelta( + minutes=settings.access_token_expire_min + ) - to_encode = {"exp": expires_delta, "id": subject.get("id"), "username": subject.get("username")} + to_encode = { + "exp": expires_delta, + "id": subject.get("id"), + "username": subject.get("username"), + } return jwt.encode(to_encode, settings.jwt_secret_key, settings.algorithm) -def create_refresh_token(subject: Dict[str, Any], expires_delta: timedelta = None) -> str: +def create_refresh_token( + subject: Dict[str, Any], expires_delta: timedelta = None +) -> str: if expires_delta is not None: expires_delta = datetime.utcnow() + expires_delta else: - expires_delta = datetime.utcnow() + timedelta(minutes=settings.refresh_token_expire_min) + expires_delta = datetime.utcnow() + timedelta( + minutes=settings.refresh_token_expire_min + ) - to_encode = {"exp": expires_delta, "id": subject.get("id"), "username": subject.get("username")} + to_encode = { + "exp": expires_delta, + "id": subject.get("id"), + "username": subject.get("username"), + } return jwt.encode(to_encode, settings.jwt_secret_key, settings.algorithm) def verify_access_token(token: str, credentials_exception): try: - payload = jwt.decode(token, settings.jwt_secret_key, algorithms=[settings.algorithm]) + payload = jwt.decode( + token, settings.jwt_secret_key, algorithms=[settings.algorithm] + ) uid = payload.get("id") username = payload.get("username") if username is None: @@ -63,10 +81,7 @@ def verify_access_token(token: str, credentials_exception): def get_current_user(token: str = Depends(oauth2_scheme)): credentials_exception = http_401( - msg="Could not validate credentials", - headers={ - "WWW-Authenticate": "Bearer" - } + msg="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"} ) return verify_access_token(token=token, credentials_exception=credentials_exception) diff --git a/app/api/dependencies/constants.py b/app/api/dependencies/constants.py index a60ceb3..eb69f0e 100644 --- a/app/api/dependencies/constants.py +++ b/app/api/dependencies/constants.py @@ -1,28 +1,28 @@ SUPPORTED_FILE_TYPES = { - 'image/jpeg': 'jpg', - 'image/png': 'png', - 'image/gif': 'gif', - 'image/bmp': 'bmp', - 'image/tiff': 'tiff', - 'application/pdf': 'pdf', - 'text/plain': 'txt', - 'application/msword': 'doc', - 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'docx', - 'application/vnd.ms-excel': 'xls', - 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'xlsx', - 
'application/vnd.ms-powerpoint': 'ppt', - 'application/vnd.openxmlformats-officedocument.presentationml.presentation': 'pptx', - 'application/zip': 'zip', - 'application/x-gzip': 'gzip', - 'application/x-tar': 'tar', - 'application/x-bzip2': 'bz2', - 'application/x-7z-compressed': '7z', - 'application/xml': 'xml', - 'application/json': 'json', - 'video/mp4': 'mp4', - 'video/mpeg': 'mpeg', - 'video/quicktime': 'mov', - 'audio/mpeg': 'mp3', - 'audio/wav': 'wav', - 'audio/x-ms-wma': 'wma', + "image/jpeg": "jpg", + "image/png": "png", + "image/gif": "gif", + "image/bmp": "bmp", + "image/tiff": "tiff", + "application/pdf": "pdf", + "text/plain": "txt", + "application/msword": "doc", + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": "docx", + "application/vnd.ms-excel": "xls", + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": "xlsx", + "application/vnd.ms-powerpoint": "ppt", + "application/vnd.openxmlformats-officedocument.presentationml.presentation": "pptx", + "application/zip": "zip", + "application/x-gzip": "gzip", + "application/x-tar": "tar", + "application/x-bzip2": "bz2", + "application/x-7z-compressed": "7z", + "application/xml": "xml", + "application/json": "json", + "video/mp4": "mp4", + "video/mpeg": "mpeg", + "video/quicktime": "mov", + "audio/mpeg": "mp3", + "audio/wav": "wav", + "audio/x-ms-wma": "wma", } diff --git a/app/api/dependencies/mail_service.py b/app/api/dependencies/mail_service.py index 450389e..6f34373 100644 --- a/app/api/dependencies/mail_service.py +++ b/app/api/dependencies/mail_service.py @@ -11,7 +11,9 @@ from app.core.exceptions import http_500 -def mail_service(mail_to: str, subject: str, content: str, file_path: str = None) -> None: +def mail_service( + mail_to: str, subject: str, content: str, file_path: str = None +) -> None: port = settings.smtp_port # For starttls smtp_server = settings.smtp_server sender_email = settings.email @@ -20,8 +22,8 @@ def mail_service(mail_to: str, subject: str, content: str, file_path: str = None # Creating Multipart message and headers message = MIMEMultipart() - message['Subject'] = subject - message.attach(MIMEText(content, _subtype='plain')) + message["Subject"] = subject + message.attach(MIMEText(content, _subtype="plain")) # Open file in binary mode if file_path is not None: @@ -36,7 +38,7 @@ def mail_service(mail_to: str, subject: str, content: str, file_path: str = None # header as attachment part.add_header( "Content-Disposition", - f"attachment; filename= {os.path.basename(file_path)}" + f"attachment; filename= {os.path.basename(file_path)}", ) message.attach(part) @@ -50,6 +52,4 @@ def mail_service(mail_to: str, subject: str, content: str, file_path: str = None server.login(sender_email, password) server.sendmail(sender_email, receiver_email, message.as_string()) except Exception as e: - raise http_500( - msg="There was some error sending email..." 
- ) from e + raise http_500(msg="There was some error sending email...") from e diff --git a/app/api/router.py b/app/api/router.py index ea237cf..24e12b7 100644 --- a/app/api/router.py +++ b/app/api/router.py @@ -1,9 +1,13 @@ from fastapi import APIRouter from app.api.routes.auth.auth import router as auth_router -from app.api.routes.documents.documents_metadata import router as documents_metadata_router +from app.api.routes.documents.documents_metadata import ( + router as documents_metadata_router, +) from app.api.routes.documents.document import router as documents_router -from app.api.routes.documents.document_organization import router as document_organization_router +from app.api.routes.documents.document_organization import ( + router as document_organization_router, +) from app.api.routes.documents.document_sharing import router as document_sharing_router from app.api.routes.documents.notify import router as notify_router diff --git a/app/api/routes/auth/auth.py b/app/api/routes/auth/auth.py index 128b789..d19a84f 100644 --- a/app/api/routes/auth/auth.py +++ b/app/api/routes/auth/auth.py @@ -14,11 +14,10 @@ response_model=UserOut, status_code=status.HTTP_201_CREATED, name="signup", - summary="Create new user" + summary="Create new user", ) async def signup( - data: UserAuth, - repository: AuthRepository = Depends(get_repository(AuthRepository)) + data: UserAuth, repository: AuthRepository = Depends(get_repository(AuthRepository)) ): return await repository.signup(userdata=data) @@ -28,11 +27,11 @@ async def signup( "/login", status_code=status.HTTP_200_OK, name="login", - summary="Create access and refresh tokens for user" + summary="Create access and refresh tokens for user", ) async def login( form_data: OAuth2PasswordRequestForm = Depends(), - repository: AuthRepository = Depends(get_repository(AuthRepository)) + repository: AuthRepository = Depends(get_repository(AuthRepository)), ): return await repository.login(ipdata=form_data) @@ -43,7 +42,7 @@ async def login( status_code=status.HTTP_200_OK, response_model=TokenData, name="get_user_data", - summary="Get details of currently logged in user" + summary="Get details of currently logged in user", ) async def get_me(user: TokenData = Depends(get_current_user)): diff --git a/app/api/routes/documents/document.py b/app/api/routes/documents/document.py index 52f36a8..0192b9d 100644 --- a/app/api/routes/documents/document.py +++ b/app/api/routes/documents/document.py @@ -9,7 +9,10 @@ from app.api.dependencies.repositories import get_repository from app.core.exceptions import http_400, http_404 from app.db.repositories.auth.auth import AuthRepository -from app.db.repositories.documents.documents import DocumentRepository, perm_delete as perm_delete_file +from app.db.repositories.documents.documents import ( + DocumentRepository, + perm_delete as perm_delete_file, +) from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.schemas.auth.bands import TokenData from app.schemas.documents.documents_metadata import DocumentMetadataRead @@ -22,7 +25,7 @@ "/upload", response_model=None, status_code=status.HTTP_201_CREATED, - name="upload_document" + name="upload_document", ) async def upload( files: List[UploadFile] = File(...), @@ -32,9 +35,8 @@ async def upload( get_repository(DocumentMetadataRepository) ), user_repository: AuthRepository = Depends(get_repository(AuthRepository)), - user: TokenData = Depends(get_current_user) + user: TokenData = Depends(get_current_user), ) -> 
Union[List[DocumentMetadataRead], List[Dict[str, str]]]: - """ Uploads a document to the specified folder. @@ -58,9 +60,7 @@ async def upload( """ if not files: - raise http_400( - msg="No input files provided..." - ) + raise http_400(msg="No input files provided...") responses = [] for file in files: @@ -69,33 +69,38 @@ async def upload( user_repo=user_repository, file=file, folder=folder, - user=user + user=user, ) if response["response"] == "file_added": - responses.append(await metadata_repository.upload(document_upload=response["upload"])) + responses.append( + await metadata_repository.upload(document_upload=response["upload"]) + ) elif response["response"] == "file_updated": - responses.append(await metadata_repository.patch( - document=response["upload"]["name"], - document_patch=response["upload"], - owner=user, - user_repo=user_repository, - is_owner=response["is_owner"] - )) + responses.append( + await metadata_repository.patch( + document=response["upload"]["name"], + document_patch=response["upload"], + owner=user, + user_repo=user_repository, + is_owner=response["is_owner"], + ) + ) return responses @router.get( "/file/{file_name}/download", status_code=status.HTTP_200_OK, - name="download_document" + name="download_document", ) async def download( file_name: str, repository: DocumentRepository = Depends(DocumentRepository), - metadata_repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + metadata_repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ) -> object: - """ Downloads a document with the specified file name. @@ -114,30 +119,29 @@ async def download( """ if not file_name: - raise http_400( - msg="No file name..." - ) + raise http_400(msg="No file name...") try: - get_document_metadata = dict(await metadata_repository.get(document=file_name, owner=user)) + get_document_metadata = dict( + await metadata_repository.get(document=file_name, owner=user) + ) - return await repository.download(s3_url=get_document_metadata["s3_url"], name=get_document_metadata["name"]) + return await repository.download( + s3_url=get_document_metadata["s3_url"], name=get_document_metadata["name"] + ) except Exception as e: - raise http_404( - msg=f"No file with {file_name}" - ) from e + raise http_404(msg=f"No file with {file_name}") from e @router.delete( - "/{file_name}", - status_code=status.HTTP_204_NO_CONTENT, - name="add_to_bin" + "/{file_name}", status_code=status.HTTP_204_NO_CONTENT, name="add_to_bin" ) async def add_to_bin( - file_name: str, - metadata_repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user), + file_name: str, + metadata_repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> None: - """ Adds a document to the bin for deletion. @@ -160,10 +164,11 @@ async def add_to_bin( name="list_of_bin", ) async def list_bin( - metadata_repo: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - owner: TokenData = Depends(get_current_user) + metadata_repo: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + owner: TokenData = Depends(get_current_user), ) -> Dict[str, List[Row | Row] | int]: - """ List bin. 
@@ -182,15 +187,16 @@ async def list_bin( @router.delete( "/trash/{file_name}", status_code=status.HTTP_204_NO_CONTENT, - name="permanently_delete_doc" + name="permanently_delete_doc", ) async def perm_delete( - file_name: str = None, - delete_all: bool = False, - metadata_repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user), + file_name: str = None, + delete_all: bool = False, + metadata_repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> None: - """ Permanently deletes a document. @@ -214,13 +220,11 @@ async def perm_delete( file=file_name, delete_all=delete_all, meta_repo=metadata_repository, - user=user + user=user, ) except Exception as e: - raise http_404( - msg=f"No file with {file_name}" - ) from e + raise http_404(msg=f"No file with {file_name}") from e @router.post( @@ -230,11 +234,12 @@ async def perm_delete( name="restore_from_bin", ) async def restore_bin( - file: str, - metadata_repo: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user) + file: str, + metadata_repo: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: - """ Restore bin. @@ -257,10 +262,11 @@ async def restore_bin( name="empty_trash", ) async def empty_trash( - metadata_repo: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user) + metadata_repo: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> None: - """ Deletes all documents in the trash bin for the authenticated user. @@ -278,15 +284,16 @@ async def empty_trash( @router.get( "/preview/{document}", status_code=status.HTTP_204_NO_CONTENT, - name="preview_document" + name="preview_document", ) async def get_document_preview( - document: Union[str, UUID], - repository: DocumentRepository = Depends(DocumentRepository), - metadata_repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user) + document: Union[str, UUID], + repository: DocumentRepository = Depends(DocumentRepository), + metadata_repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> FileResponse: - """ Get the preview of a document. @@ -305,17 +312,13 @@ async def get_document_preview( """ if not document: - raise http_404( - msg="Enter document id or name." - ) + raise http_404(msg="Enter document id or name.") try: - get_document_metadata = dict(await metadata_repository.get(document=document, owner=user)) + get_document_metadata = dict( + await metadata_repository.get(document=document, owner=user) + ) return await repository.preview(document=get_document_metadata) except TypeError as e: - raise http_404( - msg="Document does not exists." 
- ) from e + raise http_404(msg="Document does not exist.") from e except ValueError as e: - raise http_400( - msg="File type is not supported for preview" - ) from e + raise http_400(msg="File type is not supported for preview") from e diff --git a/app/api/routes/documents/document_organization.py b/app/api/routes/documents/document_organization.py index c75932c..4b62a46 100644 --- a/app/api/routes/documents/document_organization.py +++ b/app/api/routes/documents/document_organization.py @@ -23,10 +23,11 @@ async def search_document( file_types: str = None, doc_status: str = None, repository: DocumentOrgRepository = Depends(DocumentOrgRepository), - repository_metadata: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository_metadata: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ): - """ Searches for documents based on specified criteria. @@ -46,7 +47,9 @@ async def search_document( List[DocumentMetadataRead] or List[Dict[str, Any]]: The list of matching documents. """ - doc_list = await repository_metadata.doc_list(limit=limit, offset=offset, owner=user) + doc_list = await repository_metadata.doc_list( + limit=limit, offset=offset, owner=user + ) doc_list = doc_list[f"documents of {user.username}"] if tag is None and category is None and file_types is None and doc_status is None: return doc_list @@ -56,5 +59,5 @@ async def search_document( tags=tag, categories=category, file_types=file_types, - status=doc_status + status=doc_status, ) diff --git a/app/api/routes/documents/document_sharing.py b/app/api/routes/documents/document_sharing.py index 065387b..73a1424 100644 --- a/app/api/routes/documents/document_sharing.py +++ b/app/api/routes/documents/document_sharing.py @@ -20,24 +20,21 @@ @router.post( - "/share-link/{document}", - status_code=status.HTTP_200_OK, - name="share_document_link" + "/share-link/{document}", status_code=status.HTTP_200_OK, name="share_document_link" ) async def share_link_document( document: Union[str, UUID], share_request: SharingRequest, - repository: DocumentSharingRepository = Depends(get_repository(DocumentSharingRepository)), - auth_repository: AuthRepository = Depends( - get_repository(AuthRepository) + repository: DocumentSharingRepository = Depends( + get_repository(DocumentSharingRepository) ), + auth_repository: AuthRepository = Depends(get_repository(AuthRepository)), metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), notify_repository: NotifyRepo = Depends(get_repository(NotifyRepo)), - user: TokenData = Depends(get_current_user) + user: TokenData = Depends(get_current_user), ): - """ Shares a document link with another user, sends mail and notifies the receiver. 
@@ -79,31 +76,26 @@ async def share_link_document( # send a notification to the receiver await notify_repository.notify( - user=user, receivers=share_to, + user=user, + receivers=share_to, filename=doc.__dict__["name"], auth_repo=auth_repository, ) - return { - "personal_url": pre_signed_url, - "share_this": shareable_link - } + return {"personal_url": pre_signed_url, "share_this": shareable_link} except KeyError as e: - raise http_404( - msg=f"No doc: {document}" - ) from e + raise http_404(msg=f"No doc: {document}") from e @router.get("/doc/{url_id}", tags=["Document Sharing"]) async def redirect_to_share( - url_id: str, - repository: DocumentSharingRepository = Depends(get_repository( - DocumentSharingRepository) - ), - user: TokenData = Depends(get_current_user) + url_id: str, + repository: DocumentSharingRepository = Depends( + get_repository(DocumentSharingRepository) + ), + user: TokenData = Depends(get_current_user), ): - """ Redirects to a shared document URL. @@ -122,27 +114,22 @@ async def redirect_to_share( return RedirectResponse(redirect_url) -@router.post( - "/share/{document}", - status_code=status.HTTP_200_OK, - name="share_document" -) +@router.post("/share/{document}", status_code=status.HTTP_200_OK, name="share_document") async def share_document( - document: Union[str, UUID], - share_request: SharingRequest, - notify: bool = True, - repository: DocumentSharingRepository = Depends( - get_repository(DocumentSharingRepository) - ), - document_repo: DocumentRepository = Depends(DocumentRepository), - metadata_repo: DocumentMetadataRepository = Depends( - get_repository(DocumentMetadataRepository) - ), - notify_repo: NotifyRepo = Depends(get_repository(NotifyRepo)), - auth_repo: AuthRepository = Depends(get_repository(AuthRepository)), - user: TokenData = Depends(get_current_user), + document: Union[str, UUID], + share_request: SharingRequest, + notify: bool = True, + repository: DocumentSharingRepository = Depends( + get_repository(DocumentSharingRepository) + ), + document_repo: DocumentRepository = Depends(DocumentRepository), + metadata_repo: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + notify_repo: NotifyRepo = Depends(get_repository(NotifyRepo)), + auth_repo: AuthRepository = Depends(get_repository(AuthRepository)), + user: TokenData = Depends(get_current_user), ) -> None: - """ Share a document with other users, and notifies if notify is set to True (default). @@ -167,11 +154,11 @@ async def share_document( """ if not document: - raise http_404( - msg="Enter document id or UUID." 
- ) + raise http_404(msg="Enter document id or UUID.") try: - get_document_metadata = dict(await metadata_repo.get(document=document, owner=user)) + get_document_metadata = dict( + await metadata_repo.get(document=document, owner=user) + ) key = await get_key(s3_url=get_document_metadata["s3_url"]) file = await document_repo.get_s3_file_object_body(key=key) @@ -184,7 +171,7 @@ async def share_document( notify=notify, owner=user, notify_repo=notify_repo, - auth_repo=auth_repo + auth_repo=auth_repo, ) except Exception as e: raise http_404() from e diff --git a/app/api/routes/documents/documents_metadata.py b/app/api/routes/documents/documents_metadata.py index b9f509d..10e7c08 100644 --- a/app/api/routes/documents/documents_metadata.py +++ b/app/api/routes/documents/documents_metadata.py @@ -10,7 +10,10 @@ from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import DocumentMetadataPatch -from app.schemas.documents.documents_metadata import DocumentMetadataCreate, DocumentMetadataRead +from app.schemas.documents.documents_metadata import ( + DocumentMetadataCreate, + DocumentMetadataRead, +) router = APIRouter(tags=["Document MetaData"]) @@ -24,10 +27,11 @@ ) async def upload_document_metadata( document_upload: DocumentMetadataCreate = Body(...), - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: - """ Uploads document metadata. @@ -53,10 +57,11 @@ async def upload_document_metadata( async def get_documents_metadata( limit: int = Query(default=10, lt=100), offset: int = Query(default=0), - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ) -> Dict[str, Union[List[DocumentMetadataRead], Any]]: - """ Retrieves a list of document metadata. @@ -81,10 +86,11 @@ async def get_documents_metadata( ) async def get_document_metadata( document: Union[str, UUID], - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ) -> Union[DocumentMetadataRead, HTTPException]: - """ Retrieves the metadata of a specific document. @@ -109,11 +115,12 @@ async def get_document_metadata( async def update_doc_metadata_details( document: Union[str, UUID], document_patch: DocumentMetadataPatch = Body(...), - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user_repository: AuthRepository = Depends(get_repository(AuthRepository)), user: TokenData = Depends(get_current_user), ) -> Union[DocumentMetadataRead, HTTPException]: - """ Updates the details of a document's metadata. 
@@ -135,16 +142,14 @@ async def update_doc_metadata_details( try: await repository.get(document=document, owner=user) except Exception as e: - raise http_404( - msg=f"No Document with: {document}" - ) from e + raise http_404(msg=f"No Document with: {document}") from e return await repository.patch( document=document, document_patch=document_patch, owner=user, user_repo=user_repository, - is_owner=True + is_owner=True, ) @@ -155,7 +160,9 @@ async def update_doc_metadata_details( ) async def delete_document_metadata( document: Union[str, UUID], - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), user: TokenData = Depends(get_current_user), ) -> None: """ @@ -177,15 +184,14 @@ async def delete_document_metadata( try: await repository.get(document=document, owner=user) except Exception as e: - raise http_404( - msg=f"No document with the detail: {document}." - ) from e + raise http_404(msg=f"No document with the detail: {document}.") from e return await repository.delete(document=document, owner=user) # Archiving + @router.post( "/archive/{file_name)", response_model=DocumentMetadataRead, @@ -193,11 +199,12 @@ async def delete_document_metadata( name="archive_a_document", ) async def archive( - file_name: str, - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user), + file_name: str, + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: - """ Archive a document. @@ -221,10 +228,11 @@ async def archive( name="archived_doc_list", ) async def archive_list( - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user), + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> Dict[str, List[str] | int]: - """ Get the list of archived documents. @@ -247,11 +255,12 @@ async def archive_list( name="remove_doc_from_archive", ) async def un_archive( - file: str, - repository: DocumentMetadataRepository = Depends(get_repository(DocumentMetadataRepository)), - user: TokenData = Depends(get_current_user), + file: str, + repository: DocumentMetadataRepository = Depends( + get_repository(DocumentMetadataRepository) + ), + user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: - """ Un-archive a document. diff --git a/app/api/routes/documents/notify.py b/app/api/routes/documents/notify.py index 03a1ac9..85a9151 100644 --- a/app/api/routes/documents/notify.py +++ b/app/api/routes/documents/notify.py @@ -13,16 +13,11 @@ router = APIRouter(tags=["Notification"]) -@router.get( - "", - status_code=status.HTTP_200_OK, - name="get_notifications" -) +@router.get("", status_code=status.HTTP_200_OK, name="get_notifications") async def get_notifications( - repository: NotifyRepo = Depends(get_repository(NotifyRepo)), - user: TokenData = Depends(get_current_user) + repository: NotifyRepo = Depends(get_repository(NotifyRepo)), + user: TokenData = Depends(get_current_user), ) -> List[Notification]: - """ Get notifications for a user. 
@@ -43,10 +38,10 @@ async def get_notifications( name="patch_status", ) async def patch_status( - updated_status: NotifyPatchStatus = None, - notification_id: UUID = None, - repository: NotifyRepo = Depends(get_repository(NotifyRepo)), - user: TokenData = Depends(get_current_user) + updated_status: NotifyPatchStatus = None, + notification_id: UUID = None, + repository: NotifyRepo = Depends(get_repository(NotifyRepo)), + user: TokenData = Depends(get_current_user), ) -> Union[List[Notification], Notification]: """ Patch the status of a notification or mark all notifications as read. @@ -69,10 +64,12 @@ async def patch_status( if updated_status.mark_all: return await repository.mark_all_read(user=user) if notification_id: - return await repository.update_status(n_id=notification_id, updated_status=updated_status, user=user) + return await repository.update_status( + n_id=notification_id, updated_status=updated_status, user=user + ) raise http_404( msg="Bad Request: Make sure to either flag mark_all " - "or enter notification_id along with correct status as payload." + "or enter notification_id along with correct status as payload." ) @@ -82,8 +79,8 @@ async def patch_status( name="clear_all_notifications", ) async def clear_all_notifications( - repository: NotifyRepo = Depends(get_repository(NotifyRepo)), - user: TokenData = Depends(get_current_user) + repository: NotifyRepo = Depends(get_repository(NotifyRepo)), + user: TokenData = Depends(get_current_user), ) -> None: """ Clear all notifications for a user. diff --git a/app/core/config.py b/app/core/config.py index c7b8def..66060e2 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -10,6 +10,7 @@ class GlobalConfig(BaseSettings): """ Global Configuration for the FastAPI application. """ + title: str = os.environ.get("TITLE") version: str = "1.0.0" description: str = os.environ.get("DESCRIPTION") @@ -44,13 +45,17 @@ class GlobalConfig(BaseSettings): @property def sync_database_url(self) -> str: - return (f"postgresql://{self.postgres_user}:{self.postgres_password}@" - f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}") + return ( + f"postgresql://{self.postgres_user}:{self.postgres_password}@" + f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}" + ) @property def async_database_url(self) -> str: - return (f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@" - f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}") + return ( + f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@" + f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}" + ) settings = GlobalConfig() diff --git a/app/core/exceptions.py b/app/core/exceptions.py index 601f49e..a294536 100644 --- a/app/core/exceptions.py +++ b/app/core/exceptions.py @@ -9,9 +9,13 @@ def http_400(msg: str = "Bad Request...") -> HTTPException: return HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=msg) -def http_401(msg: str = "Unauthorized", headers: Dict[str, str] = None) -> HTTPException: +def http_401( + msg: str = "Unauthorized", headers: Dict[str, str] = None +) -> HTTPException: """Unauthorized Access""" - return HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=msg, headers=headers) + return HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=msg, headers=headers + ) def http_403(msg: str = "Forbidden") -> HTTPException: diff --git a/app/db/models.py b/app/db/models.py index 982e45e..2149d18 100644 --- a/app/db/models.py +++ b/app/db/models.py @@ 
-22,18 +22,14 @@ query_cache_size=0, ) -session = sessionmaker( - bind=engine, - autocommit=False, - autoflush=False -) +session = sessionmaker(bind=engine, autocommit=False, autoflush=False) async_session = sessionmaker( bind=async_engine, class_=AsyncSession, autocommit=False, autoflush=False, - expire_on_commit=False + expire_on_commit=False, ) Base = declarative_base() diff --git a/app/db/repositories/auth/auth.py b/app/db/repositories/auth/auth.py index 16b7bed..6ab041f 100644 --- a/app/db/repositories/auth/auth.py +++ b/app/db/repositories/auth/auth.py @@ -3,7 +3,12 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from app.api.dependencies.auth_utils import get_hashed_password, verify_password, create_access_token, create_refresh_token +from app.api.dependencies.auth_utils import ( + get_hashed_password, + verify_password, + create_access_token, + create_refresh_token, +) from app.core.exceptions import http_400, http_403 from app.db.tables.auth.auth import User from app.schemas.auth.bands import UserOut, UserAuth @@ -13,26 +18,21 @@ class AuthRepository: def __init__(self, session: AsyncSession) -> None: self.session = session - async def _check_user_or_none(self, userdata: UserAuth) -> Coroutine[Any, Any, Any | None]: - stmt = ( - select(User) - .where(User.username == userdata.username or User.email == userdata.email) + async def _check_user_or_none( + self, userdata: UserAuth + ) -> Coroutine[Any, Any, Any | None]: + stmt = select(User).where( + User.username == userdata.username or User.email == userdata.email ) result = await self.session.execute(stmt) return result.scalar_one_or_none() async def get_user(self, field: str, detail: str): - stmt = '' + stmt = "" if field == "username": - stmt = ( - select(User) - .where(User.username == detail) - ) + stmt = select(User).where(User.username == detail) elif field == "email": - stmt = ( - select(User) - .where(User.email == detail) - ) + stmt = select(User).where(User.email == detail) result = await self.session.execute(stmt) return result.scalar_one_or_none() @@ -59,11 +59,17 @@ async def login(self, ipdata): raise http_403(msg="Recheck the credentials") user = user.__dict__ hashed_password = user.get("password") - if not verify_password(password=ipdata.password, hashed_password=hashed_password): + if not verify_password( + password=ipdata.password, hashed_password=hashed_password + ): raise http_403("Incorrect Password") return { "token_type": "bearer", - "access_token": create_access_token(subject={"id": user.get("id"), "username": user.get("username")}), - "refresh_token": create_refresh_token(subject={"id": user.get("id"), "username": user.get("username")}) + "access_token": create_access_token( + subject={"id": user.get("id"), "username": user.get("username")} + ), + "refresh_token": create_refresh_token( + subject={"id": user.get("id"), "username": user.get("username")} + ), } diff --git a/app/db/repositories/documents/document_organization.py b/app/db/repositories/documents/document_organization.py index c0b0f32..e12237b 100644 --- a/app/db/repositories/documents/document_organization.py +++ b/app/db/repositories/documents/document_organization.py @@ -9,13 +9,11 @@ class DocumentOrgRepository: Repository for managing document organization. """ - def __init__(self): - ... + def __init__(self): ... 
@staticmethod async def _search_tags( - docs: List[DocumentMetadataRead], - tags: List[str] + docs: List[DocumentMetadataRead], tags: List[str] ) -> List[Dict[str, str]]: result = [] @@ -24,14 +22,14 @@ async def _search_tags( result.extend( doc for tag in tags - if doc["tags"] and ''.join(tag.split()) in doc["tags"] + if doc["tags"] and "".join(tag.split()) in doc["tags"] ) return result or None @staticmethod async def _search_category( - docs: List[DocumentMetadataRead], categories: List[str] + docs: List[DocumentMetadataRead], categories: List[str] ) -> List[Dict[str, str]]: result = [] @@ -40,21 +38,21 @@ async def _search_category( result.extend( doc for category in categories - if doc["categories"] and ''.join(category.split()) in doc["categories"] + if doc["categories"] and "".join(category.split()) in doc["categories"] ) return result or None @staticmethod async def _search_file_type( - docs: List[DocumentMetadataRead], file_types: List[str] + docs: List[DocumentMetadataRead], file_types: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ for ftype in file_types: - ftype = ''.join(ftype.split()) + ftype = "".join(ftype.split()) result.extend( doc for key, val in SUPPORTED_FILE_TYPES.items() @@ -65,16 +63,14 @@ async def _search_file_type( @staticmethod async def _search_by_status( - docs: List[DocumentMetadataRead], status: List[str] + docs: List[DocumentMetadataRead], status: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ result.extend( - doc - for stat in status - if str(doc["status"]) == f"StatusEnum.{stat}" + doc for stat in status if str(doc["status"]) == f"StatusEnum.{stat}" ) return result or None @@ -85,25 +81,29 @@ async def search_doc( tags: str, categories: str, file_types: str, - status: str + status: str, ) -> Union[List[List[Dict[str, Any]]], None]: results = [] if tags: - tags = tags.split(',') + tags = tags.split(",") results.append(await self._search_tags(docs=docs, tags=tags)) if categories: - categories = categories.split(',') - results.append(await self._search_category(docs=docs, categories=categories)) + categories = categories.split(",") + results.append( + await self._search_category(docs=docs, categories=categories) + ) if file_types: - file_type = file_types.split(',') - results.append(await self._search_file_type(docs=docs, file_types=file_type)) + file_type = file_types.split(",") + results.append( + await self._search_file_type(docs=docs, file_types=file_type) + ) if status: - _status = status.split(',') + _status = status.split(",") results.append(await self._search_by_status(docs=docs, status=_status)) return results diff --git a/app/db/repositories/documents/document_sharing.py b/app/db/repositories/documents/document_sharing.py index 8edd9e9..1e2abe7 100644 --- a/app/db/repositories/documents/document_sharing.py +++ b/app/db/repositories/documents/document_sharing.py @@ -28,16 +28,13 @@ class DocumentSharingRepository: """ def __init__(self, session: AsyncSession) -> None: - self.client = boto3.client('s3') + self.client = boto3.client("s3") self.session = session async def get_user_mail(self, user: TokenData): - stmt = ( - select(User) - .where(User.id == user.id) - ) + stmt = select(User).where(User.id == user.id) execute = await self.session.execute(stmt) @@ -46,18 +43,15 @@ async def get_user_mail(self, user: TokenData): @staticmethod async def _generate_id(url: str) -> str: hash_object = hashlib.md5() - hash_object.update(url.encode('utf-8')) + 
hash_object.update(url.encode("utf-8")) n = randint(0, 25) - return hash_object.hexdigest()[n:n+6] + return hash_object.hexdigest()[n : n + 6] async def _get_saved_links(self, filename: str) -> Dict[str, Any]: - stmt = ( - select(DocumentSharing) - .where(DocumentSharing.filename == filename) - ) + stmt = select(DocumentSharing).where(DocumentSharing.filename == filename) result = await self.session.execute(stmt) return result.scalar_one_or_none() @@ -67,49 +61,41 @@ async def update_visits(self, filename: str, visits_left: int): await self.session.execute( update(DocumentSharing) .where(DocumentSharing.filename == filename) - .values(visits=visits_left-1) + .values(visits=visits_left - 1) ) elif visits_left == 1: await self.session.execute( - delete(DocumentSharing) - .where(DocumentSharing.filename == filename) + delete(DocumentSharing).where(DocumentSharing.filename == filename) ) async def cleanup_expired_links(self): now = datetime.now(timezone.utc) - stmt = ( - delete(DocumentSharing) - .where(DocumentSharing.expires_at <= now) - ) + stmt = delete(DocumentSharing).where(DocumentSharing.expires_at <= now) try: await self.session.execute(stmt) except Exception as e: raise http_500() from e async def get_presigned_url( - self, doc: Dict[str, Any] + self, doc: Dict[str, Any] ) -> Union[str, Dict[str, str]]: try: params = { - 'Bucket': settings.s3_bucket, - 'Key': await get_key(s3_url=doc["s3_url"]) + "Bucket": settings.s3_bucket, + "Key": await get_key(s3_url=doc["s3_url"]), } response = self.client.generate_presigned_url( - 'get_object', - Params=params, - ExpiresIn=3600 + "get_object", Params=params, ExpiresIn=3600 ) except NoCredentialsError as e: - return { - "error": f"Invalid AWS Credentials: {e}" - } + return {"error": f"Invalid AWS Credentials: {e}"} return response async def get_shareable_link( - self, owner_id: str, url: str, visits: int, filename: str, share_to: List[str] + self, owner_id: str, url: str, visits: int, filename: str, share_to: List[str] ): # task to clean uo the database for expired links @@ -121,8 +107,8 @@ async def get_shareable_link( "note": f"Links already shared... 
valid Till {ans['expires_at']}", "response": { "shareable_link": f"{settings.host_url}{settings.api_prefix}/doc/{ans['url_id']}", - "visits_left": ans["visits"] - } + "visits_left": ans["visits"], + }, } url_id = await self._generate_id(url=url) @@ -133,7 +119,7 @@ async def get_shareable_link( url=url, expires_at=datetime.now(timezone.utc) + timedelta(seconds=3599), visits=visits, - share_to=share_to + share_to=share_to, ) try: self.session.add(share_entry) @@ -143,23 +129,22 @@ async def get_shareable_link( response = share_entry.__dict__ return { "shareable_link": f"{settings.host_url}{settings.api_prefix}/doc/{response['url_id']}", - "visits": response["visits"] + "visits": response["visits"], } except Exception as e: raise http_500() from e async def get_redirect_url(self, url_id: str): - stmt = ( - select(DocumentSharing) - .where(DocumentSharing.url_id == url_id) - ) + stmt = select(DocumentSharing).where(DocumentSharing.url_id == url_id) result = await self.session.execute(stmt) try: result = result.scalar_one_or_none().__dict__ - await self.update_visits(filename=result["filename"], visits_left=result["visits"]) + await self.update_visits( + filename=result["filename"], visits_left=result["visits"] + ) return result["url"] except AttributeError as e: @@ -168,7 +153,7 @@ async def get_redirect_url(self, url_id: str): ) from e async def send_mail( - self, user: TokenData, mail_to: Union[List[str], None], link: str + self, user: TokenData, mail_to: Union[List[str], None], link: str ) -> None: if mail_to: @@ -181,16 +166,13 @@ async def send_mail( """ for mails in mail_to: - mail_service(mail_to=mails, subject=subj, content=content, file_path=None) + mail_service( + mail_to=mails, subject=subj, content=content, file_path=None + ) - async def confirm_access( - self, user: TokenData, url_id: str | None - ) -> bool: + async def confirm_access(self, user: TokenData, url_id: str | None) -> bool: # check if login user is owner or to whom it is shared - stmt = ( - select(DocumentSharing) - .where(DocumentSharing.url_id == url_id) - ) + stmt = select(DocumentSharing).where(DocumentSharing.url_id == url_id) result = await self.session.execute(stmt) try: @@ -203,19 +185,18 @@ async def confirm_access( or user.username in result.get("share_to") ) except Exception as e: - raise http_404( - msg="The link has expired..." 
- ) from e + raise http_404(msg="The link has expired...") from e async def share_document( - self, filename: str, - document_key: str, - file: Any, - share_request: SharingRequest, - notify: bool, - owner: TokenData, - notify_repo: NotifyRepo, - auth_repo: AuthRepository, + self, + filename: str, + document_key: str, + file: Any, + share_request: SharingRequest, + notify: bool, + owner: TokenData, + notify_repo: NotifyRepo, + auth_repo: AuthRepository, ) -> None: user_mail = await self.get_user_mail(owner) @@ -241,16 +222,10 @@ async def share_document( DocFlow """ mail_service( - mail_to=mails, - subject=subject, - content=content, - file_path=temp_path + mail_to=mails, subject=subject, content=content, file_path=temp_path ) if notify: return await notify_repo.notify( - user=owner, - receivers=share_to, - filename=filename, - auth_repo=auth_repo + user=owner, receivers=share_to, filename=filename, auth_repo=auth_repo ) diff --git a/app/db/repositories/documents/documents.py b/app/db/repositories/documents/documents.py index 88bd838..235b0b8 100644 --- a/app/db/repositories/documents/documents.py +++ b/app/db/repositories/documents/documents.py @@ -18,8 +18,7 @@ async def perm_delete( - file: str, - delete_all: bool, meta_repo: DocumentMetadataRepository, user: TokenData + file: str, delete_all: bool, meta_repo: DocumentMetadataRepository, user: TokenData ) -> None: if delete_all: @@ -35,14 +34,11 @@ async def perm_delete( class DocumentRepository: def __init__(self): - self.s3_client = boto3.resource('s3') - self.client = boto3.client('s3') + self.s3_client = boto3.resource("s3") + self.client = boto3.client("s3") self.s3_bucket = self.s3_client.Bucket(settings.s3_bucket) self.client.put_bucket_versioning( - Bucket=settings.s3_bucket, - VersioningConfiguration={ - 'Status': 'Enabled' - } + Bucket=settings.s3_bucket, VersioningConfiguration={"Status": "Enabled"} ) @staticmethod @@ -56,7 +52,7 @@ async def _calculate_file_hash(file: File) -> str: async def get_s3_file_object_body(self, key: str): s3_object = self.client.get_object(Bucket=settings.s3_bucket, Key=key) - file = s3_object['Body'].read() + file = s3_object["Body"].read() return file @@ -68,7 +64,7 @@ async def _delete_object(self, key: str) -> None: raise e async def _upload_new_file( - self, file: File, folder: str, contents, file_type: str, user: TokenData + self, file: File, folder: str, contents, file_type: str, user: TokenData ) -> Dict[str, Any]: if folder is None: @@ -86,14 +82,18 @@ async def _upload_new_file( "s3_url": await get_s3_url(key=key), "size": len(contents), "file_type": file_type, - "file_hash": await self._calculate_file_hash(file=file) - } + "file_hash": await self._calculate_file_hash(file=file), + }, } async def _upload_new_version( - self, doc: dict, file: File, - contents, file_type: str, new_file_hash: str, - is_owner: bool + self, + doc: dict, + file: File, + contents, + file_type: str, + new_file_hash: str, + is_owner: bool, ) -> Dict[str, Any]: key = await get_key(s3_url=doc["s3_url"]) @@ -108,12 +108,12 @@ async def _upload_new_version( "s3_url": await get_s3_url(key=key), "size": len(contents), "file_type": file_type, - "file_hash": new_file_hash - } + "file_hash": new_file_hash, + }, } async def upload( - self, metadata_repo, user_repo, file: File, folder: str, user: TokenData + self, metadata_repo, user_repo, file: File, folder: str, user: TokenData ) -> Dict[str, Any]: """ Uploads a file to the specified folder in the document repository. 
@@ -134,9 +134,7 @@ async def upload( file_type = file.content_type if file_type not in SUPPORTED_FILE_TYPES: - raise http_400( - msg=f"File type {file_type} not supported." - ) + raise http_400(msg=f"File type {file_type} not supported.") contents = await file.read() @@ -149,18 +147,26 @@ async def upload( if get_doc := await metadata_repo.get_doc(filename=file.filename): get_doc = get_doc.__dict__ # Check if logged-in user has update access - logged_in_user = (await user_repo.get_user( - field="username", detail=user.username - )).__dict__ - if ((get_doc["access_to"] is not None) and - logged_in_user["email"] in get_doc["access_to"]): - if get_doc['file_hash'] != new_file_hash: + logged_in_user = ( + await user_repo.get_user(field="username", detail=user.username) + ).__dict__ + if (get_doc["access_to"] is not None) and logged_in_user[ + "email" + ] in get_doc["access_to"]: + if get_doc["file_hash"] != new_file_hash: # can upload a version to a file... - print(f"Have update access, to a file... owner: {get_doc['owner_id']}") + print( + f"Have update access, to a file... owner: {get_doc['owner_id']}" + ) return await self._upload_new_version( - doc=get_doc, file=file, contents=contents, file_type=file_type, - new_file_hash=await self._calculate_file_hash(file=file), - is_owner=False + doc=get_doc, + file=file, + contents=contents, + file_type=file_type, + new_file_hash=await self._calculate_file_hash( + file=file + ), + is_owner=False, ) else: return await self._upload_new_file( @@ -168,13 +174,19 @@ async def upload( folder=folder, contents=contents, file_type=file_type, - user=user + user=user, ) return await self._upload_new_file( - file=file, folder=folder, contents=contents, file_type=file_type, user=user + file=file, + folder=folder, + contents=contents, + file_type=file_type, + user=user, ) - print(f"File {file.filename} already present, checking if there is an update...") + print( + f"File {file.filename} already present, checking if there is an update..." + ) if doc["file_hash"] != new_file_hash: print("File has been updated, uploading new version...") @@ -184,12 +196,12 @@ async def upload( contents=contents, file_type=file_type, new_file_hash=new_file_hash, - is_owner=True + is_owner=True, ) return { "response": "File already present and no changes detected.", - "upload": "Noting to update..." 
+ "upload": "Noting to update...", } except Exception as e: raise http_404(msg="Error uploading the file...") from e @@ -202,12 +214,10 @@ async def download(self, s3_url: str, name: str) -> Dict[str, str]: self.s3_client.meta.client.download_file( Bucket=settings.s3_bucket, Key=await key, - Filename=r"/app/downloads/docflow_" + f"{name}" + Filename=r"/app/downloads/docflow_" + f"{name}", ) except ClientError as e: - raise http_404( - msg=f"File not found: {e}" - ) from e + raise http_404(msg=f"File not found: {e}") from e return {"message": f"successfully downloaded {name} in downloads folder."} @@ -219,10 +229,10 @@ async def preview(self, document: Dict[str, Any]) -> FileResponse: # Determining the file extension from the key and media type for File Response _, extension = os.path.splitext(key) - if extension.lower() in ['.jpg', '.jpeg', '.png', '.gif']: - media_type = 'image/' + extension.lower().lstrip('.') - elif extension.lower() == '.pdf': - media_type = 'application/pdf' + if extension.lower() in [".jpg", ".jpeg", ".png", ".gif"]: + media_type = "image/" + extension.lower().lstrip(".") + elif extension.lower() == ".pdf": + media_type = "application/pdf" else: raise ValueError("Unsupported file type.") diff --git a/app/db/repositories/documents/documents_metadata.py b/app/db/repositories/documents/documents_metadata.py index c660218..86bf59d 100644 --- a/app/db/repositories/documents/documents_metadata.py +++ b/app/db/repositories/documents/documents_metadata.py @@ -15,7 +15,10 @@ from app.db.tables.base_class import StatusEnum from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import DocumentMetadataPatch -from app.schemas.documents.documents_metadata import DocumentMetadataCreate, DocumentMetadataRead +from app.schemas.documents.documents_metadata import ( + DocumentMetadataCreate, + DocumentMetadataRead, +) class DocumentMetadataRepository: @@ -53,15 +56,17 @@ async def _extract_changes(document_patch: DocumentMetadataPatch) -> dict: return document_patch return document_patch.model_dump(exclude_unset=True) - async def _execute_update(self, db_document: DocumentMetadata | Dict[str, Any], changes: dict) -> None: + async def _execute_update( + self, db_document: DocumentMetadata | Dict[str, Any], changes: dict + ) -> None: if isinstance(db_document, dict): stmt = ( update(DocumentMetadata) - .where(DocumentMetadata.id == db_document.get('id')) + .where(DocumentMetadata.id == db_document.get("id")) .values(changes) ) - doc_name = db_document.get('name') + doc_name = db_document.get("name") else: stmt = ( update(DocumentMetadata) @@ -73,9 +78,7 @@ async def _execute_update(self, db_document: DocumentMetadata | Dict[str, Any], try: await self.session.execute(stmt) except Exception as e: - raise http_409( - msg=f"Error while updating document: {doc_name}" - ) from e + raise http_409(msg=f"Error while updating document: {doc_name}") from e async def _update_access_and_permission(self, db_document, changes, user_repo): @@ -83,14 +86,14 @@ async def _update_access_and_permission(self, db_document, changes, user_repo): # if access_to has email ids, update doc_user_access table with doc_id and user_id for user_email in access_given_to: try: - user_id = (await user_repo.get_user(field="email", detail=user_email)).__dict__["id"] + user_id = ( + await user_repo.get_user(field="email", detail=user_email) + ).__dict__["id"] # update doc_user_access table with doc_id and user_id await self._update_doc_user_access(db_document, user_id) except IntegrityError as e: - 
raise http_409( - msg=f"User '{user_email}' already has access..." - ) from e + raise http_409(msg=f"User '{user_email}' already has access...") from e except AttributeError as e: raise http_404( msg=f"The user with '{user_email}' does not exists, make sure user has account in DocFlow." @@ -98,20 +101,23 @@ async def _update_access_and_permission(self, db_document, changes, user_repo): async def _update_doc_user_access(self, db_document, user_id): - stmt = insert(doc_user_access).values(doc_id=db_document.__dict__["id"], user_id=user_id) + stmt = insert(doc_user_access).values( + doc_id=db_document.__dict__["id"], user_id=user_id + ) await self.session.execute(stmt) await self.session.commit() async def _delete_access(self, document) -> None: - await self.session.execute(doc_user_access.delete().where(doc_user_access.c.doc_id == document.id)) + await self.session.execute( + doc_user_access.delete().where(doc_user_access.c.doc_id == document.id) + ) async def _auto_delete(self, bin_items: List) -> bool | None: for item in bin_items: if item.DocumentMetadata.created_at <= datetime.now(timezone.utc): - stmt = ( - delete(DocumentMetadata) - .where(DocumentMetadata.id == item.DocumentMetadata.id) + stmt = delete(DocumentMetadata).where( + DocumentMetadata.id == item.DocumentMetadata.id ) await self.session.execute(stmt) return True @@ -138,7 +144,9 @@ async def get_doc(self, filename: str) -> Dict[str, Any]: return result.scalar_one_or_none() - async def upload(self, document_upload: DocumentMetadataCreate) -> DocumentMetadataRead: + async def upload( + self, document_upload: DocumentMetadataCreate + ) -> DocumentMetadataRead: if not isinstance(document_upload, dict): db_document = DocumentMetadata(**document_upload.model_dump()) @@ -157,7 +165,7 @@ async def upload(self, document_upload: DocumentMetadataCreate) -> DocumentMetad return DocumentMetadataRead(**db_document.__dict__) async def doc_list( - self, owner: TokenData, limit: int = 10, offset: int = 0 + self, owner: TokenData, limit: int = 10, offset: int = 0 ) -> Dict[str, Union[List[DocumentMetadataRead], Any]]: stmt = ( @@ -174,30 +182,32 @@ async def doc_list( result_list = result.fetchall() for row in result_list: - row.doc_cls.__dict__.pop('_sa_instance_state', None) + row.doc_cls.__dict__.pop("_sa_instance_state", None) - result = [DocumentMetadataRead(**row.doc_cls.__dict__) for row in result_list] - return { - f"documents of {owner.username}": result, - "no_of_docs": len(result) - } + result = [ + DocumentMetadataRead(**row.doc_cls.__dict__) for row in result_list + ] + return {f"documents of {owner.username}": result, "no_of_docs": len(result)} except Exception as e: raise http_404(msg="No Documents found") from e - async def get(self, document: Union[str, UUID], owner: TokenData) -> Union[DocumentMetadataRead, HTTPException]: + async def get( + self, document: Union[str, UUID], owner: TokenData + ) -> Union[DocumentMetadataRead, HTTPException]: db_document = await self._get_instance(document=document, owner=owner) if db_document is None: - return http_409( - msg=f"No Document with {document}" - ) + return http_409(msg=f"No Document with {document}") return DocumentMetadataRead(**db_document.__dict__) async def patch( - self, - document: Union[str, UUID], document_patch: DocumentMetadataPatch, owner: TokenData, - user_repo: AuthRepository, is_owner: bool + self, + document: Union[str, UUID], + document_patch: DocumentMetadataPatch, + owner: TokenData, + user_repo: AuthRepository, + is_owner: bool, ) -> 
Union[DocumentMetadataRead, HTTPException]: if is_owner: @@ -230,7 +240,11 @@ async def delete(self, document: Union[str, UUID], owner: TokenData) -> None: setattr(db_document, "file_type", None) setattr(db_document, "categories", None) # considering created_at as delete_at to delete it after 30 days - setattr(db_document, "created_at", datetime.now(timezone.utc) + timedelta(days=30)) + setattr( + db_document, + "created_at", + datetime.now(timezone.utc) + timedelta(days=30), + ) # delete entry from doc_user_access table await self._delete_access(document=db_document) @@ -239,9 +253,7 @@ async def delete(self, document: Union[str, UUID], owner: TokenData) -> None: await self.session.commit() except Exception as e: - raise http_404( - msg=f"No file with {document}" - ) from e + raise http_404(msg=f"No file with {document}") from e async def bin_list(self, owner: TokenData) -> Dict[str, List[Row | Row] | int]: @@ -256,10 +268,7 @@ async def bin_list(self, owner: TokenData) -> Dict[str, List[Row | Row] | int]: if await self._auto_delete(result): result = (await self.session.execute(stmt)).fetchall() - return { - "response": result, - "no_of_docs": len(result) - } + return {"response": result, "no_of_docs": len(result)} async def restore(self, file: str, owner: TokenData) -> DocumentMetadataRead: @@ -268,15 +277,13 @@ async def restore(self, file: str, owner: TokenData) -> DocumentMetadataRead: if doc_list["no_of_docs"] > 0: for doc in doc_list["response"]: if doc.DocumentMetadata.name == file: - change = {'status': StatusEnum.private} - await self._execute_update(db_document=doc.DocumentMetadata, changes=change) + change = {"status": StatusEnum.private} + await self._execute_update( + db_document=doc.DocumentMetadata, changes=change + ) return DocumentMetadataRead(**doc.DocumentMetadata.__dict__) - raise http_409( - msg="Doc is not deleted" - ) - raise http_404( - msg="Doc does not exists" - ) + raise http_409(msg="Doc is not deleted") + raise http_404(msg="Doc does not exist") async def perm_delete_a_doc(self, document: UUID | None, owner: TokenData) -> None: @@ -304,7 +311,7 @@ async def archive(self, file: str, user: TokenData): doc = await self._get_instance(document=file, owner=user) if doc and doc.status != StatusEnum.archived: - change = {'status': StatusEnum.archived} + change = {"status": StatusEnum.archived} await self._execute_update(db_document=doc, changes=change) return DocumentMetadataRead(**doc.__dict__) @@ -322,23 +329,16 @@ async def archive_list(self, user: TokenData) -> Dict[str, List[str] | int]: ) result = (await self.session.execute(stmt)).fetchall() - return { - "response": result, - "no_of_docs": len(result) - } + return {"response": result, "no_of_docs": len(result)} - async def un_archive(self, file: str, user: TokenData) -> DocumentMetadataRead: + async def un_archive(self, file: str, user: TokenData) -> DocumentMetadataRead: doc = await self._get_instance(document=file, owner=user) if doc and doc.status == StatusEnum.archived: - change = {'status': 'private'} + change = {"status": "private"} await self._execute_update(db_document=doc, changes=change) return DocumentMetadataRead(**doc.__dict__) if doc and doc.status != StatusEnum.archived: - raise http_409( - msg="Doc is not archived" - ) - raise http_404( - msg="Doc does not exits" - ) + raise http_409(msg="Doc is not archived") + raise http_404(msg="Doc does not exist") diff --git a/app/db/repositories/documents/notify.py index c1698ac..c621fdf 100644 --- 
a/app/db/repositories/documents/notify.py +++ b/app/db/repositories/documents/notify.py @@ -17,7 +17,13 @@ class NotifyRepo: def __init__(self, session: AsyncSession) -> None: self.session = session - async def notify(self, user: TokenData, receivers: List[str], filename: str, auth_repo: AuthRepository) -> None: + async def notify( + self, + user: TokenData, + receivers: List[str], + filename: str, + auth_repo: AuthRepository, + ) -> None: """ Notify users about a shared file. @@ -41,7 +47,7 @@ async def notify(self, user: TokenData, receivers: List[str], filename: str, aut notify_entry = Notify( receiver_id=receiver_details.__dict__["id"], message=f"{user.username} shared {filename} with you! Access the shared file via mail...", - status=NotifyEnum.unread + status=NotifyEnum.unread, ) try: @@ -72,18 +78,13 @@ async def get_notification_by_id(self, n_id: UUID, user: TokenData) -> Notificat HTTP_404: If no notification with the given ID is found. """ - stmt = ( - select(Notify) - .where(Notify.receiver_id == user.id and Notify.id == n_id) - ) + stmt = select(Notify).where(Notify.receiver_id == user.id and Notify.id == n_id) try: result = (await self.session.execute(stmt)).scalar_one_or_none() return Notification(**result.__dict__) except Exception as e: - raise http_404( - msg=f"No notification with id: {n_id}" - ) from e + raise http_404(msg=f"No notification with id: {n_id}") from e async def get_notifications(self, user: TokenData) -> List[Notification]: """ @@ -96,10 +97,7 @@ async def get_notifications(self, user: TokenData) -> List[Notification]: List[Notification]: A list of notification objects. """ - stmt = ( - select(Notify) - .where(Notify.receiver_id == user.id) - ) + stmt = select(Notify).where(Notify.receiver_id == user.id) notifications = (await self.session.execute(stmt)).fetchall() @@ -132,12 +130,11 @@ async def mark_all_read(self, user: TokenData) -> List[Notification]: await self.session.execute(stmt) return await self.get_notifications(user=user) except Exception as e: - raise http_409( - msg="Error updating marking notification read..." - ) from e - - async def update_status(self, n_id: UUID, updated_status: NotifyPatchStatus, user: TokenData): + raise http_409(msg="Error updating marking notification read...") from e + async def update_status( + self, n_id: UUID, updated_status: NotifyPatchStatus, user: TokenData + ): """ Update the status of a notification for a specific user. @@ -154,7 +151,11 @@ async def update_status(self, n_id: UUID, updated_status: NotifyPatchStatus, use """ stmt = ( update(Notify) - .where(Notify.receiver_id == user.id and Notify.id == n_id and Notify.status != updated_status.status) + .where( + Notify.receiver_id == user.id + and Notify.id == n_id + and Notify.status != updated_status.status + ) .values({Notify.status: updated_status.status}) ) @@ -162,9 +163,7 @@ async def update_status(self, n_id: UUID, updated_status: NotifyPatchStatus, use await self.session.execute(stmt) return await self.get_notification_by_id(n_id=n_id, user=user) except Exception as e: - raise http_409( - msg="Error updating notification status..." - ) from e + raise http_409(msg="Error updating notification status...") from e async def clear_notification(self, user: TokenData) -> None: """ @@ -180,10 +179,7 @@ async def clear_notification(self, user: TokenData) -> None: Exception: If an error occurs while clearing the notifications. 
""" - stmt = ( - delete(Notify) - .where(Notify.receiver_id == user.id) - ) + stmt = delete(Notify).where(Notify.receiver_id == user.id) try: await self.session.execute(stmt) diff --git a/app/db/tables/auth/auth.py b/app/db/tables/auth/auth.py index 625f2d9..503e17d 100644 --- a/app/db/tables/auth/auth.py +++ b/app/db/tables/auth/auth.py @@ -9,11 +9,19 @@ class User(Base): __tablename__ = "users" - id = Column(String(26), primary_key=True, default=get_ulid, unique=True, index=True, nullable=False) + id = Column( + String(26), + primary_key=True, + default=get_ulid, + unique=True, + index=True, + nullable=False, + ) username: str = Column(String, unique=True, nullable=False) email = Column(String, unique=True, nullable=False) password = Column(Text, nullable=False) - user_since = Column(TIMESTAMP(timezone=True), - nullable=False, server_default=text('now()')) + user_since = Column( + TIMESTAMP(timezone=True), nullable=False, server_default=text("now()") + ) owner_of = relationship("DocumentMetadata", back_populates="owner") diff --git a/app/db/tables/base_class.py b/app/db/tables/base_class.py index bdda169..3007cbb 100644 --- a/app/db/tables/base_class.py +++ b/app/db/tables/base_class.py @@ -5,6 +5,7 @@ class StatusEnum(enum.Enum): """ Enum for status of document """ + public = "public" private = "private" shared = "shared" @@ -16,6 +17,7 @@ class NotifyEnum(enum.Enum): """ Enum for status of notification """ + read = "read" unread = "unread" diff --git a/app/db/tables/documents/documents_metadata.py b/app/db/tables/documents/documents_metadata.py index b9b484b..5d454b2 100644 --- a/app/db/tables/documents/documents_metadata.py +++ b/app/db/tables/documents/documents_metadata.py @@ -2,7 +2,18 @@ from uuid import uuid4 from typing import List, Optional -from sqlalchemy import Column, String, Integer, ARRAY, text, DateTime, Enum, ForeignKey, Table, UniqueConstraint +from sqlalchemy import ( + Column, + String, + Integer, + ARRAY, + text, + DateTime, + Enum, + ForeignKey, + Table, + UniqueConstraint, +) from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import Mapped, relationship @@ -11,18 +22,24 @@ doc_user_access = Table( - 'doc_user_access', + "doc_user_access", Base.metadata, - Column('doc_id', UUID(as_uuid=True), ForeignKey('document_metadata.id', ondelete='CASCADE')), - Column('user_id', String(26), ForeignKey('users.id')), - UniqueConstraint('doc_id', 'user_id', name="uq_doc_user_access_doc_user") + Column( + "doc_id", + UUID(as_uuid=True), + ForeignKey("document_metadata.id", ondelete="CASCADE"), + ), + Column("user_id", String(26), ForeignKey("users.id")), + UniqueConstraint("doc_id", "user_id", name="uq_doc_user_access_doc_user"), ) class DocumentMetadata(Base): __tablename__ = "document_metadata" - id: UUID = Column(UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False) + id: UUID = Column( + UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False + ) owner_id: Mapped[str] = Column(String, ForeignKey("users.id"), nullable=False) name: str = Column(String) s3_url: str = Column(String, unique=True) @@ -30,7 +47,7 @@ class DocumentMetadata(Base): DateTime(timezone=True), default=datetime.now(timezone.utc), nullable=False, - server_default=text("NOW()") + server_default=text("NOW()"), ) size: Optional[int] = Column(Integer) file_type: Optional[str] = Column(String) @@ -40,5 +57,7 @@ class DocumentMetadata(Base): file_hash: Optional[str] = Column(String) access_to: Optional[List[str]] = Column(ARRAY(String)) - 
update_access = relationship("User", secondary=doc_user_access, passive_deletes=True) + update_access = relationship( + "User", secondary=doc_user_access, passive_deletes=True + ) owner = relationship("User", back_populates="owner_of") diff --git a/app/db/tables/documents/notify.py b/app/db/tables/documents/notify.py index 64a806f..d005c28 100644 --- a/app/db/tables/documents/notify.py +++ b/app/db/tables/documents/notify.py @@ -9,9 +9,11 @@ class Notify(Base): - __tablename__ = 'notify' + __tablename__ = "notify" - id: UUID = Column(UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False) + id: UUID = Column( + UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False + ) receiver_id: str = Column(String, nullable=False) message: str = Column(Text, nullable=False) status: Enum = Column(Enum(NotifyEnum), default=NotifyEnum.unread) @@ -19,5 +21,5 @@ class Notify(Base): DateTime(timezone=True), default=datetime.now(timezone.utc), nullable=False, - server_default=text("NOW()") + server_default=text("NOW()"), ) diff --git a/app/logs/logger.py b/app/logs/logger.py index 8fe2bec..5212322 100644 --- a/app/logs/logger.py +++ b/app/logs/logger.py @@ -4,7 +4,9 @@ LOGGER_NAME: str = "docflow" -LOG_FORMAT: str = "%(asctime)s [%(levelname)s] | %(name)s | %(filename)s | %(funcName)s | %(lineno)d | %(message)s" +LOG_FORMAT: str = ( + "%(asctime)s [%(levelname)s] | %(name)s | %(filename)s | %(funcName)s | %(lineno)d | %(message)s" +) LOG_LEVEL: int = logging.DEBUG BASE_DIR = abspath(dirname(__file__)) @@ -14,7 +16,7 @@ LOGGING = { "version": 1, "disable_existing_logger": False, - "formatters": { + "formatters": { "standard": { "format": LOG_FORMAT, "datefmt": "%Y-%m-%d %H:%M:%S", @@ -31,47 +33,25 @@ "class": "logging.handlers.RotatingFileHandler", "formatter": "standard", "level": "DEBUG", - "filename": 'docflow.log', - "mode": 'a', + "filename": "docflow.log", + "mode": "a", "encoding": "utf-8", "maxBytes": 500000, - "backupCount": 4 - } + "backupCount": 4, + }, }, "loggers": { - "": { - "handlers": ["default"], - "level": "INFO", - "propagate": True - }, + "": {"handlers": ["default"], "level": "INFO", "propagate": True}, LOGGER_NAME: { "handlers": ["default", "file"], "level": LOG_LEVEL, - "propagate": False - }, - "sqlalchemy": { - "handlers": ["file"], - "level": "WARNING" - }, - "s3": { - "handlers": ["file"], - "level": "WARNING" - }, - "uvicorn.error": { - "level": "INFO", - "handlers": ["default"], - "propagate": False - }, - "uvicorn.access": { - "level": "INFO", - "handlers": ["default"], - "propagate": True - }, - "uvicorn.asgi": { - "level": "INFO", - "handlers": ["default"], - "propagate": True + "propagate": False, }, + "sqlalchemy": {"handlers": ["file"], "level": "WARNING"}, + "s3": {"handlers": ["file"], "level": "WARNING"}, + "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": False}, + "uvicorn.access": {"level": "INFO", "handlers": ["default"], "propagate": True}, + "uvicorn.asgi": {"level": "INFO", "handlers": ["default"], "propagate": True}, }, } diff --git a/app/main.py b/app/main.py index 01b1df4..dc2dc99 100644 --- a/app/main.py +++ b/app/main.py @@ -17,7 +17,7 @@ app.include_router(router=router, prefix=settings.api_prefix) -FAVICON_PATH = 'favicon.ico' +FAVICON_PATH = "favicon.ico" @app.get(FAVICON_PATH, include_in_schema=False, tags=["Default"]) @@ -27,7 +27,9 @@ async def favicon(): @app.get("/", tags=["Default"]) async def root(): - return {"API": "Document Management API... Docker's up!!! is it? or not... 
Yes it is!!!"} + return { + "API": "Document Management API... Docker's up!!! is it? or not... Yes it is!!!" + } @app.on_event("startup") diff --git a/app/schemas/documents/document_sharing.py b/app/schemas/documents/document_sharing.py index 32579d8..642cfe9 100644 --- a/app/schemas/documents/document_sharing.py +++ b/app/schemas/documents/document_sharing.py @@ -5,8 +5,7 @@ from app.schemas.documents.bands import DocumentSharingBase -class DocumentSharingCreate(DocumentSharingBase): - ... +class DocumentSharingCreate(DocumentSharingBase): ... class DocumentSharingRead(DocumentSharingBase): @@ -18,5 +17,5 @@ class Config: class SharingRequest(BaseModel): - visits: int = 1 # default value of visits (1) - share_to: Optional[List[str]] = None # emails, or usernames of users to share. + visits: int = 1 # default value of visits (1) + share_to: Optional[List[str]] = None # emails, or usernames of users to share. diff --git a/migrations/env.py b/migrations/env.py index a5b9bbe..fb4cffa 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -72,9 +72,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/migrations/versions/2a02384ab925_initial_almebic.py b/migrations/versions/2a02384ab925_initial_almebic.py index 58f1595..f23c173 100644 --- a/migrations/versions/2a02384ab925_initial_almebic.py +++ b/migrations/versions/2a02384ab925_initial_almebic.py @@ -5,6 +5,7 @@ Create Date: 2023-11-01 20:51:23.621851 """ + from typing import Sequence, Union from alembic import op @@ -12,7 +13,7 @@ from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. -revision: str = '2a02384ab925' +revision: str = "2a02384ab925" down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -20,76 +21,117 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('notify', - sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), - sa.Column('receiver_id', sa.String(), nullable=False), - sa.Column('message', sa.Text(), nullable=False), - sa.Column('status', sa.Enum('read', 'unread', name='notifyenum'), nullable=True), - sa.Column('notified_at', sa.DateTime(timezone=True), server_default=sa.text('NOW()'), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table( + "notify", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("receiver_id", sa.String(), nullable=False), + sa.Column("message", sa.Text(), nullable=False), + sa.Column( + "status", sa.Enum("read", "unread", name="notifyenum"), nullable=True + ), + sa.Column( + "notified_at", + sa.DateTime(timezone=True), + server_default=sa.text("NOW()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_notify_id"), "notify", ["id"], unique=False) + op.create_table( + "users", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("username", sa.String(), nullable=False), + sa.Column("email", sa.String(), nullable=False), + sa.Column("password", sa.Text(), nullable=False), + sa.Column( + "user_since", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + sa.UniqueConstraint("username"), ) - op.create_index(op.f('ix_notify_id'), 'notify', ['id'], unique=False) - op.create_table('users', - sa.Column('id', sa.String(length=26), nullable=False), - sa.Column('username', sa.String(), nullable=False), - sa.Column('email', sa.String(), nullable=False), - sa.Column('password', sa.Text(), nullable=False), - sa.Column('user_since', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('email'), - sa.UniqueConstraint('username') + op.create_index(op.f("ix_users_id"), "users", ["id"], unique=True) + op.create_table( + "document_metadata", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("owner_id", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("s3_url", sa.String(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.text("NOW()"), + nullable=False, + ), + sa.Column("size", sa.Integer(), nullable=True), + sa.Column("file_type", sa.String(), nullable=True), + sa.Column("tags", sa.ARRAY(sa.String()), nullable=True), + sa.Column("categories", sa.ARRAY(sa.String()), nullable=True), + sa.Column( + "status", + sa.Enum( + "public", "private", "shared", "deleted", "archived", name="statusenum" + ), + nullable=True, + ), + sa.Column("file_hash", sa.String(), nullable=True), + sa.Column("access_to", sa.ARRAY(sa.String()), nullable=True), + sa.ForeignKeyConstraint( + ["owner_id"], + ["users.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("s3_url"), ) - op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=True) - op.create_table('document_metadata', - sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), - sa.Column('owner_id', sa.String(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('s3_url', sa.String(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('NOW()'), nullable=False), - sa.Column('size', sa.Integer(), nullable=True), - sa.Column('file_type', sa.String(), nullable=True), - sa.Column('tags', 
sa.ARRAY(sa.String()), nullable=True), - sa.Column('categories', sa.ARRAY(sa.String()), nullable=True), - sa.Column('status', sa.Enum('public', 'private', 'shared', 'deleted', 'archived', name='statusenum'), nullable=True), - sa.Column('file_hash', sa.String(), nullable=True), - sa.Column('access_to', sa.ARRAY(sa.String()), nullable=True), - sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('s3_url') + op.create_index( + op.f("ix_document_metadata_id"), "document_metadata", ["id"], unique=False ) - op.create_index(op.f('ix_document_metadata_id'), 'document_metadata', ['id'], unique=False) - op.create_table('share_url', - sa.Column('url_id', sa.String(), nullable=False), - sa.Column('filename', sa.String(), nullable=False), - sa.Column('owner_id', sa.String(), nullable=False), - sa.Column('url', sa.String(), nullable=True), - sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('visits', sa.Integer(), nullable=True), - sa.Column('share_to', sa.ARRAY(sa.String()), nullable=True), - sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('url_id'), - sa.UniqueConstraint('filename'), - sa.UniqueConstraint('url'), - sa.UniqueConstraint('url_id') + op.create_table( + "share_url", + sa.Column("url_id", sa.String(), nullable=False), + sa.Column("filename", sa.String(), nullable=False), + sa.Column("owner_id", sa.String(), nullable=False), + sa.Column("url", sa.String(), nullable=True), + sa.Column("expires_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("visits", sa.Integer(), nullable=True), + sa.Column("share_to", sa.ARRAY(sa.String()), nullable=True), + sa.ForeignKeyConstraint( + ["owner_id"], + ["users.id"], + ), + sa.PrimaryKeyConstraint("url_id"), + sa.UniqueConstraint("filename"), + sa.UniqueConstraint("url"), + sa.UniqueConstraint("url_id"), ) - op.create_table('doc_user_access', - sa.Column('doc_id', postgresql.UUID(as_uuid=True), nullable=True), - sa.Column('user_id', sa.String(length=26), nullable=True), - sa.ForeignKeyConstraint(['doc_id'], ['document_metadata.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.UniqueConstraint('doc_id', 'user_id', name='uq_doc_user_access_doc_user') + op.create_table( + "doc_user_access", + sa.Column("doc_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("user_id", sa.String(length=26), nullable=True), + sa.ForeignKeyConstraint( + ["doc_id"], ["document_metadata.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["users.id"], + ), + sa.UniqueConstraint("doc_id", "user_id", name="uq_doc_user_access_doc_user"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('doc_user_access') - op.drop_table('share_url') - op.drop_index(op.f('ix_document_metadata_id'), table_name='document_metadata') - op.drop_table('document_metadata') - op.drop_index(op.f('ix_users_id'), table_name='users') - op.drop_table('users') - op.drop_index(op.f('ix_notify_id'), table_name='notify') - op.drop_table('notify') + op.drop_table("doc_user_access") + op.drop_table("share_url") + op.drop_index(op.f("ix_document_metadata_id"), table_name="document_metadata") + op.drop_table("document_metadata") + op.drop_index(op.f("ix_users_id"), table_name="users") + op.drop_table("users") + op.drop_index(op.f("ix_notify_id"), table_name="notify") + op.drop_table("notify") # ### end Alembic commands ###