Repository: jiisanda/docflow Branch: master Commit: 007aa9dec1fb Files: 83 Total size: 252.9 KB Directory structure: gitextract_0gflb5ec/ ├── .dockerignore ├── .github/ │ └── workflows/ │ └── python-app.yml ├── .gitignore ├── .gitmodules ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── TODO.md ├── __init__.py ├── alembic.ini ├── api.Dockerfile ├── app/ │ ├── __init__.py │ ├── api/ │ │ ├── __init__.py │ │ ├── dependencies/ │ │ │ ├── __init__.py │ │ │ ├── auth_utils.py │ │ │ ├── constants.py │ │ │ ├── mail_service.py │ │ │ └── repositories.py │ │ ├── router.py │ │ └── routes/ │ │ ├── __init__.py │ │ ├── auth/ │ │ │ ├── __init__.py │ │ │ └── auth.py │ │ └── documents/ │ │ ├── __init__.py │ │ ├── document.py │ │ ├── document_organization.py │ │ ├── document_sharing.py │ │ ├── documents_metadata.py │ │ └── notify.py │ ├── core/ │ │ ├── __init__.py │ │ ├── config.py │ │ └── exceptions.py │ ├── db/ │ │ ├── __init__.py │ │ ├── models.py │ │ ├── repositories/ │ │ │ ├── __init__.py │ │ │ ├── auth/ │ │ │ │ ├── __init__.py │ │ │ │ └── auth.py │ │ │ └── documents/ │ │ │ ├── __init__.py │ │ │ ├── document_organization.py │ │ │ ├── document_sharing.py │ │ │ ├── documents.py │ │ │ ├── documents_metadata.py │ │ │ └── notify.py │ │ └── tables/ │ │ ├── __init__.py │ │ ├── auth/ │ │ │ ├── __init__.py │ │ │ └── auth.py │ │ ├── base_class.py │ │ └── documents/ │ │ ├── __init__.py │ │ ├── document_sharing.py │ │ ├── documents_metadata.py │ │ └── notify.py │ ├── docs/ │ │ ├── DocFlow-DocumentManagementAPI.postman_collection.json │ │ ├── commands/ │ │ │ ├── docker.md │ │ │ └── postgres.md │ │ ├── features/ │ │ │ ├── postman.md │ │ │ ├── preview.md │ │ │ ├── sharing.md │ │ │ └── upload.md │ │ ├── issues.txt │ │ └── setup.md │ ├── logs/ │ │ ├── __init__.py │ │ └── logger.py │ ├── main.py │ ├── schemas/ │ │ ├── __init__.py │ │ ├── auth/ │ │ │ ├── __init__.py │ │ │ ├── auth.py │ │ │ └── bands.py │ │ └── documents/ │ │ ├── __init__.py │ │ ├── bands.py │ │ ├── document_sharing.py │ │ └── documents_metadata.py │ └── scripts/ │ ├── create_database.sql │ └── init_bucket.py ├── docker-compose.override.yml ├── docker-compose.prod.yml ├── docker-compose.yml ├── hello.txt ├── migrations/ │ ├── __init__.py │ ├── env.py │ ├── script.py.mako │ └── versions/ │ ├── 2a02384ab925_initial_almebic.py │ └── __init__.py ├── nginx/ │ └── nginx.conf └── requirements/ └── api.txt ================================================ FILE CONTENTS ================================================ ================================================ FILE: .dockerignore ================================================ .git .idea/ downloads migrations README.md docker-compose.yml app/.env *.log *.pyc __pychache__ ================================================ FILE: .github/workflows/python-app.yml ================================================ # This workflow will install Python dependencies, run tests and lint with a single version of Python name: Python Code Quality and Tests on: push: branches: [ "master" ] pull_request: branches: [ "master" ] permissions: contents: read jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python 3.12 uses: actions/setup-python@v3 with: python-version: "3.12" - name: Install dependencies run: | python -m pip install --upgrade pip pip install flake8 black pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Format check with Black run: | # Check if any files would be reformatted black --check --diff . 
- name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics # - name: Test with pytest # run: | # pytest ================================================ FILE: .gitignore ================================================ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ app/downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation app/docs/_build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # poetry # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control #poetry.lock # pdm # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. #pdm.lock # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it # in version control. # https://pdm.fming.dev/#use-with-ide .pdm.toml # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments app/.env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ # PyCharm # JetBrains specific template is maintained in a separate JetBrains.gitignore that can # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
.idea/ # .pem file *.pem downloads/ ================================================ FILE: .gitmodules ================================================ [submodule "ui"] path = ui url = https://github.com/jiisanda/docflow-ui.git ================================================ FILE: CODE_OF_CONDUCT.md ================================================ # Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at . All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. 
**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2023 jiisanda Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ # DocFlow - Document Management API

![Python](https://img.shields.io/badge/python-3670A0?style=for-the-badge&logo=python&logoColor=ffdd54) ![FastAPI](https://img.shields.io/badge/FastAPI-005571?style=for-the-badge&logo=fastapi) ![Postgres](https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white) ![AWS](https://img.shields.io/badge/AWS-%23FF9900.svg?style=for-the-badge&logo=amazon-aws&logoColor=white) ![JWT](https://img.shields.io/badge/JWT-black?style=for-the-badge&logo=JSON%20web%20tokens) ![Swagger](https://img.shields.io/badge/-Swagger-%23Clojure?style=for-the-badge&logo=swagger&logoColor=white) ![Github Pages](https://img.shields.io/badge/github%20pages-121013?style=for-the-badge&logo=github&logoColor=white) ![GMail](https://img.shields.io/badge/Gmail-D14836?style=for-the-badge&logo=gmail&logoColor=white) DocFlow is a powerful Document Management API designed to streamline document handling, including seamless uploading, downloading, organization, versioning, sharing, and more. ## 😎 Upcoming Updates - 🟨 Document Interactions - Adding Comments and Tags - 🟨 Import documents from unread emails - 🟨 Video Preview - 🟨 Adding custom metadata fields to document - 🟨 2-factor authentication - 🟨 Storage quota per user? (Maybe to enable limit storage per user) - 🟨 Bulk file importer ## 🚀 Key Features - 💡 Document Upload and Download - 💡 Organization and Searching - 💡 Versioning - 💡 Sharing - 💡 Authentication and Authorization - 💡 Access Control List - 💡 Deletion and Archiving - 💡 Document Preview - 💡 Send file via Email - 💡 Minio Support—for on-premise object storage ## 📖 API Documentation and Image Explore the [API Documentation](https://documenter.getpostman.com/view/20984268/2s9YRGxUcp) for detailed information on how to use DocFlow's features. Details about features and commands can be found [here](app/docs). Download docker image from [docker-hub](https://hub.docker.com/r/jiisanda/docflow). Or just run ```commandline docker pull jiisanda/docflow:1 ``` ## 🔸 Setup Docflow Follow the steps outlined in the [setup.md](app/docs/setup.md) file. ## 🧩 Implementation Detail | Features | Implementation Detail | |----------------------------------|------------------------------------------------------------------| | Upload | [Detail](https://github.com/jiisanda/docflow#-document-upload) | | Download | [Detail](https://github.com/jiisanda/docflow#-document-download) | | Sharing | [Detail](https://github.com/jiisanda/docflow#-document-sharing) | | Document Preview | [Detail](https://github.com/jiisanda/docflow#-document-preview) | ### 📤 Document Upload Here's how documents are uploaded in DocFlow: ![upload-document](app/docs/imgs/document/document_upload.png) For a detailed explanation, visit the [Document Upload Documentation](app/docs/features/upload.md). ### 📥 Document Download Here's how a user can download a file in DocFlow. ![download-document](app/docs/imgs/document/docflow_download.png) For detailed explanation, visit the [Document Download Documentation](). ### 📨 Document Sharing Learn how to share documents in DocFlow: ![share-document](app/docs/imgs/sharing/document_sharing.png) For detailed instructions, visit the [Document Sharing Documentation](app/docs/features/sharing.md). ### 👀 Document Preview Here's how the preview of docs works in DocFlow. 
![preview-document](app/docs/imgs/document/document_preview.png) For detailed instructions, visit the [Document Preview Documentation](app/docs/features/preview.md) ## 📜 License [![Licence](https://img.shields.io/github/license/Ileriayo/markdown-badges?style=for-the-badge)](./LICENSE) ## 📧 Contact Us For any questions or support, please [contact](mailto:harshjaiswal2307@gmail.com). Test DocFlow to manage documents seamlessly! ================================================ FILE: TODO.md ================================================ # ✨ TODO Following features are to be added and open for contributions: - 🟨 Document Interactions - Adding Comments and Tags - 🟨 Import documents from unread emails - 🟨 Video Preview - 🟨 Adding custom metadata fields to document - 🟨 2-factor authentication - 🟨 Storage quota per user? (Maybe to enable limit storage per user) - 🟨 Bulk file importer - ⭕ Group Share : Share a document to a group of users Needs: Group creation - ⭕ Shared file history: History of all the shared files ================================================ FILE: __init__.py ================================================ ================================================ FILE: alembic.ini ================================================ # A generic, single database configuration. [alembic] # path to migration scripts script_location = migrations # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s # Uncomment the line below if you want the files to be prepended with date and time # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file # for all available tokens # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s # sys.path path, will be prepended to sys.path if present. # defaults to the current working directory. prepend_sys_path = . # timezone to use when rendering the date within the migration file # as well as the filename. # If specified, requires the python-dateutil library that can be # installed by adding `alembic[tz]` to the pip requirements # string value is passed to dateutil.tz.gettz() # leave blank for localtime # timezone = # max length of characters to apply to the # "slug" field # truncate_slug_length = 40 # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # set to 'true' to allow .pyc and .pyo files without # a source .py file to be detected as revisions in the # versions/ directory # sourceless = false # version location specification; This defaults # to migrations/versions. When using multiple version # directories, initial revisions must be specified with --version-path. # The path separator used here should be the separator specified by "version_path_separator" below. # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions # version path separator; As mentioned above, this is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. # Valid values for version_path_separator are: # # version_path_separator = : # version_path_separator = ; # version_path_separator = space version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
# set to 'true' to search source files recursively # in each "version_locations" directory # new in Alembic version 1.10 # recursive_version_locations = false # the output encoding used when revision files # are written from script.py.mako # output_encoding = utf-8 sqlalchemy.url = [post_write_hooks] # post_write_hooks defines scripts or Python functions that are run # on newly generated revision scripts. See the documentation for further # detail and examples # format using "black" - use the console_scripts runner, against the "black" entrypoint # hooks = black # black.type = console_scripts # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME # lint with attempts to fix using "ruff" - use the exec runner, execute a binary # hooks = ruff # ruff.type = exec # ruff.executable = %(here)s/.venv/bin/ruff # ruff.options = --fix REVISION_SCRIPT_FILENAME # Logging configuration [loggers] keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = WARN handlers = console qualname = [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S ================================================ FILE: api.Dockerfile ================================================ FROM python:3.12 LABEL authors="jiisanda" WORKDIR /usr/src/app COPY requirements/api.txt ./ RUN pip install --upgrade pip RUN pip install --no-cache-dir -r api.txt # Fix the path here too COPY . . CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] ================================================ FILE: app/__init__.py ================================================ ================================================ FILE: app/api/__init__.py ================================================ ================================================ FILE: app/api/dependencies/__init__.py ================================================ ================================================ FILE: app/api/dependencies/auth_utils.py ================================================ from datetime import datetime, timedelta from typing import Any, Dict from fastapi import Depends from fastapi.security import OAuth2PasswordBearer from jose import jwt, JWTError from passlib.context import CryptContext from app.core.config import settings from app.core.exceptions import http_401 from app.schemas.auth.bands import TokenData # Password Hashing password_context = CryptContext(schemes=["bcrypt"], deprecated="auto") # oauth2 scheme oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/u/login", scheme_name="JWT") def get_hashed_password(password: str) -> str: return password_context.hash(password) def verify_password(password: str, hashed_password: str) -> bool: return password_context.verify(password, hashed_password) def create_access_token( subject: Dict[str, Any], expires_delta: timedelta = None ) -> str: if expires_delta is not None: expires_delta = datetime.utcnow() + expires_delta else: expires_delta = datetime.utcnow() + timedelta( minutes=settings.access_token_expire_min ) to_encode = { "exp": expires_delta, "id": subject.get("id"), "username": subject.get("username"), } return jwt.encode(to_encode, settings.jwt_secret_key, settings.algorithm) def create_refresh_token( subject: Dict[str, Any], expires_delta: timedelta = None ) -> 
str: if expires_delta is not None: expires_delta = datetime.utcnow() + expires_delta else: expires_delta = datetime.utcnow() + timedelta( minutes=settings.refresh_token_expire_min ) to_encode = { "exp": expires_delta, "id": subject.get("id"), "username": subject.get("username"), } return jwt.encode(to_encode, settings.jwt_secret_key, settings.algorithm) def verify_access_token(token: str, credentials_exception): try: payload = jwt.decode( token, settings.jwt_secret_key, algorithms=[settings.algorithm] ) uid = payload.get("id") username = payload.get("username") if username is None: raise credentials_exception token_data = TokenData(id=uid, username=username) except JWTError as e: raise credentials_exception from e return token_data def get_current_user(token: str = Depends(oauth2_scheme)): credentials_exception = http_401( msg="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"} ) return verify_access_token(token=token, credentials_exception=credentials_exception) ================================================ FILE: app/api/dependencies/constants.py ================================================ SUPPORTED_FILE_TYPES = { "image/jpeg": "jpg", "image/png": "png", "image/gif": "gif", "image/bmp": "bmp", "image/tiff": "tiff", "application/pdf": "pdf", "text/plain": "txt", "application/msword": "doc", "application/vnd.openxmlformats-officedocument.wordprocessingml.document": "docx", "application/vnd.ms-excel": "xls", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": "xlsx", "application/vnd.ms-powerpoint": "ppt", "application/vnd.openxmlformats-officedocument.presentationml.presentation": "pptx", "application/zip": "zip", "application/x-gzip": "gzip", "application/x-tar": "tar", "application/x-bzip2": "bz2", "application/x-7z-compressed": "7z", "application/xml": "xml", "application/json": "json", "video/mp4": "mp4", "video/mpeg": "mpeg", "video/quicktime": "mov", "audio/mpeg": "mp3", "audio/wav": "wav", "audio/x-ms-wma": "wma", } ================================================ FILE: app/api/dependencies/mail_service.py ================================================ import os.path import smtplib import ssl from email import encoders from email.mime.base import MIMEBase from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from app.core.config import settings from app.core.exceptions import http_500 def mail_service( mail_to: str, subject: str, content: str, file_path: str = None ) -> None: port = settings.smtp_port # For starttls smtp_server = settings.smtp_server sender_email = settings.email receiver_email = mail_to password = settings.app_pw # Creating Multipart message and headers message = MIMEMultipart() message["Subject"] = subject message.attach(MIMEText(content, _subtype="plain")) # Open file in binary mode if file_path is not None: with open(file_path, "rb") as attachment: # Below line adds file as application/octet_stream part = MIMEBase("application", "octet_stream") part.set_payload(attachment.read()) # Encoding file in ASCII characters for sending emails encoders.encode_base64(part) # header as attachment part.add_header( "Content-Disposition", f"attachment; filename= {os.path.basename(file_path)}", ) message.attach(part) try: context = ssl.create_default_context() with smtplib.SMTP(smtp_server, port) as server: server.ehlo() server.starttls(context=context) server.ehlo() server.login(sender_email, password) server.sendmail(sender_email, receiver_email, message.as_string()) except Exception as e: raise 
http_500(msg="There was some error sending email...") from e ================================================ FILE: app/api/dependencies/repositories.py ================================================ import os.path import re from typing import Optional import ulid from fastapi import Depends from fastapi.responses import FileResponse from sqlalchemy.ext.asyncio import AsyncSession from app.core.config import settings from app.db.models import async_session class TempFileResponse(FileResponse): def __init__(self, path, *args, **kwargs): super().__init__(path, *args, **kwargs) self.file_path = path def __del__(self): if os.path.exists(self.file_path): os.remove(self.file_path) async def get_db() -> AsyncSession: async with async_session() as session: yield session await session.commit() def get_repository(repository): def _get_repository(session: AsyncSession = Depends(get_db)): return repository(session) return _get_repository async def get_s3_url(key: str) -> str: if settings.s3_endpoint_url: # minio URL format return f"{settings.s3_endpoint_url}/{settings.s3_bucket}/{key}" return f"https://{settings.s3_bucket}.s3.{settings.aws_region}.amazonaws.com/{key}" async def get_key(s3_url: str) -> Optional[str]: if settings.s3_endpoint_url: # minio url format: http://host:9000/bucket/key # remove the endpoint and bucket form the URL url_without_endpoint = s3_url.replace(settings.s3_endpoint_url, "") url_without_bucket = url_without_endpoint.replace(f"/{settings.s3_bucket}/", "") return url_without_bucket.lstrip("/") else: pattern = ( f"https://{settings.s3_bucket}" + r"\.s3\." + settings.aws_region + r"\.amazonaws\.com/" + r"(.+)" ) if match := re.search(pattern, s3_url): return match[1] return None def get_ulid(): return str(ulid.ULID()) ================================================ FILE: app/api/router.py ================================================ from fastapi import APIRouter from app.api.routes.auth.auth import router as auth_router from app.api.routes.documents.documents_metadata import ( router as documents_metadata_router, ) from app.api.routes.documents.document import router as documents_router from app.api.routes.documents.document_organization import ( router as document_organization_router, ) from app.api.routes.documents.document_sharing import router as document_sharing_router from app.api.routes.documents.notify import router as notify_router router = APIRouter() router.include_router(auth_router, prefix="/u") router.include_router(documents_router, prefix="") router.include_router(notify_router, prefix="/notifications") router.include_router(documents_metadata_router, prefix="/metadata") router.include_router(document_organization_router, prefix="/filter") router.include_router(document_sharing_router) ================================================ FILE: app/api/routes/__init__.py ================================================ ================================================ FILE: app/api/routes/auth/__init__.py ================================================ ================================================ FILE: app/api/routes/auth/auth.py ================================================ from fastapi import APIRouter, status, Depends from fastapi.security import OAuth2PasswordRequestForm from app.api.dependencies.auth_utils import get_current_user from app.api.dependencies.repositories import get_repository from app.schemas.auth.bands import UserOut, UserAuth, TokenData from app.db.repositories.auth.auth import AuthRepository router = APIRouter(tags=["User Auth"]) 
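The signup/login routes below receive their `AuthRepository` through the `get_repository()` factory defined above in `app/api/dependencies/repositories.py`. A minimal, self-contained sketch of how that dependency wiring behaves (here `ExampleRepository`, `FakeSession`, and the `/example` route are hypothetical stand-ins for illustration, not part of DocFlow):

```python
from fastapi import Depends, FastAPI


class FakeSession:
    """Stand-in for the SQLAlchemy AsyncSession yielded by DocFlow's get_db()."""

    async def commit(self) -> None:  # mirrors the commit that runs after the yield
        pass


async def get_db():
    # DocFlow yields a real AsyncSession from async_session(); a stub keeps
    # this sketch runnable without a database.
    session = FakeSession()
    yield session
    await session.commit()


def get_repository(repository):
    # Same shape as app/api/dependencies/repositories.py: return a callable that
    # FastAPI invokes per request with the session resolved from get_db().
    def _get_repository(session=Depends(get_db)):
        return repository(session)

    return _get_repository


class ExampleRepository:
    def __init__(self, session) -> None:
        self.session = session


app = FastAPI()


@app.get("/example")
async def example(
    repo: ExampleRepository = Depends(get_repository(ExampleRepository)),
):
    # repo is constructed fresh for this request with the yielded session.
    return {"repository": type(repo).__name__}
```

This is the same pattern that lets each route below declare `repository: AuthRepository = Depends(get_repository(AuthRepository))` without opening or committing sessions by hand.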
@router.post( "/signup", response_model=UserOut, status_code=status.HTTP_201_CREATED, name="signup", summary="Create new user", ) async def signup( data: UserAuth, repository: AuthRepository = Depends(get_repository(AuthRepository)) ): return await repository.signup(userdata=data) @router.post( "/login", status_code=status.HTTP_200_OK, name="login", summary="Create access and refresh tokens for user", ) async def login( form_data: OAuth2PasswordRequestForm = Depends(), repository: AuthRepository = Depends(get_repository(AuthRepository)), ): return await repository.login(ipdata=form_data) @router.get( "/me", status_code=status.HTTP_200_OK, response_model=TokenData, name="get_user_data", summary="Get details of currently logged in user", ) async def get_me(user: TokenData = Depends(get_current_user)): return user ================================================ FILE: app/api/routes/documents/__init__.py ================================================ ================================================ FILE: app/api/routes/documents/document.py ================================================ from typing import Dict, List, Optional, Union from uuid import UUID from fastapi import APIRouter, status, File, UploadFile, Depends from fastapi.responses import FileResponse from sqlalchemy.engine import Row from app.api.dependencies.auth_utils import get_current_user from app.api.dependencies.repositories import get_repository from app.core.exceptions import http_400, http_404 from app.db.repositories.auth.auth import AuthRepository from app.db.repositories.documents.documents import ( DocumentRepository, perm_delete as perm_delete_file, ) from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.schemas.auth.bands import TokenData from app.schemas.documents.documents_metadata import DocumentMetadataRead router = APIRouter(tags=["Document"]) @router.post( "/upload", response_model=None, status_code=status.HTTP_201_CREATED, name="upload_document", ) async def upload( files: List[UploadFile] = File(...), folder: Optional[str] = None, repository: DocumentRepository = Depends(DocumentRepository), metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user_repository: AuthRepository = Depends(get_repository(AuthRepository)), user: TokenData = Depends(get_current_user), ) -> Union[List[DocumentMetadataRead], List[Dict[str, str]]]: """ Uploads a document to the specified folder. Args: files (List[UploadFile]): The files to be uploaded. folder (Optional[str]): The folder where the document will be stored. Defaults to None. repository (DocumentRepository): The repository for managing documents. metadata_repository (DocumentMetadataRepository): The repository for managing document metadata. user_repository (AuthRepository): The repository for managing user authentication. user (TokenData): The token data of the authenticated user. Returns: Union[DocumentMetadataRead, Dict[str, str]]: If the file is added, returns the uploaded document metadata. If the file is updated, returns the patched document metadata. Otherwise, returns a response dictionary. Raises: HTTP_400: If no input file is provided. 
""" if not files: raise http_400(msg="No input files provided...") responses = [] for file in files: response = await repository.upload( metadata_repo=metadata_repository, user_repo=user_repository, file=file, folder=folder, user=user, ) if response["response"] == "file_added": responses.append( await metadata_repository.upload(document_upload=response["upload"]) ) elif response["response"] == "file_updated": responses.append( await metadata_repository.patch( document=response["upload"]["name"], document_patch=response["upload"], owner=user, user_repo=user_repository, is_owner=response["is_owner"], ) ) return responses @router.get( "/file/{file_name}/download", status_code=status.HTTP_200_OK, name="download_document", ) async def download( file_name: str, repository: DocumentRepository = Depends(DocumentRepository), metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> object: """ Downloads a document with the specified file name. Args: file_name (str): The name of the file to be downloaded. repository (DocumentRepository): The repository for managing documents. metadata_repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: object: The downloaded document. Raises: HTTP_400: If no file name is provided. HTTP_404: If no file with the specified name is found. """ if not file_name: raise http_400(msg="No file name...") try: get_document_metadata = dict( await metadata_repository.get(document=file_name, owner=user) ) return await repository.download( s3_url=get_document_metadata["s3_url"], name=get_document_metadata["name"] ) except Exception as e: raise http_404(msg=f"No file with {file_name}") from e @router.get( "/trash", status_code=status.HTTP_200_OK, response_model=None, name="list_of_bin", ) async def list_bin( metadata_repo: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), owner: TokenData = Depends(get_current_user), ) -> Dict[str, List[Row | Row] | int]: """ List bin. Args: metadata_repo: The document metadata repository. owner: The token data of the owner. Returns: Dict[str, List[Row | Row] | int]: The list of bin. """ return await metadata_repo.bin_list(owner=owner) @router.delete( "/trash", status_code=status.HTTP_204_NO_CONTENT, name="empty_trash", ) async def empty_trash( metadata_repo: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> None: """ Deletes all documents in the trash bin for the authenticated user. Args: metadata_repo (DocumentMetadataRepository): The repository for accessing document metadata. user (TokenData): The token data of the authenticated user. Returns: None """ return await metadata_repo.empty_bin(owner=user) @router.delete( "/trash/{file_name}", status_code=status.HTTP_204_NO_CONTENT, name="permanently_delete_doc", ) async def perm_delete( file_name: str = None, delete_all: bool = False, metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> None: """ Permanently deletes a document. Args: file_name (str, optional): The name of the file to be permanently deleted. Defaults to None. delete_all (bool): Flag indicating whether to delete all documents in the bin. Defaults to False. 
metadata_repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: None: If the file is permanently deleted. Raises: HTTP_404: If no file with the specified name is found. """ try: get_documents_metadata = dict(await metadata_repository.bin_list(owner=user)) if len(get_documents_metadata["response"]) > 0: return await perm_delete_file( file=file_name, delete_all=delete_all, meta_repo=metadata_repository, user=user, ) except Exception as e: raise http_404(msg=f"No file with {file_name}") from e @router.post( "/restore/{file}", status_code=status.HTTP_200_OK, response_model=DocumentMetadataRead, name="restore_from_bin", ) async def restore_bin( file: str, metadata_repo: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: """ Restore bin. Args: file: The file to restore. metadata_repo: The document metadata repository. user: The token data of the user. Returns: DocumentMetadataRead: The restored document metadata. """ return await metadata_repo.restore(file=file, owner=user) @router.delete( "/{file_name}", status_code=status.HTTP_204_NO_CONTENT, name="add_to_bin" ) async def add_to_bin( file_name: str, metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> None: """ Adds a document to the bin for deletion. Args: file_name (str): The name of the file to be added to the bin. metadata_repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: None: If the file is added to the bin. """ return await metadata_repository.delete(document=file_name, owner=user) @router.get( "/preview/{document}", status_code=status.HTTP_200_OK, name="preview_document", ) async def get_document_preview( document: Union[str, UUID], repository: DocumentRepository = Depends(DocumentRepository), metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> FileResponse: """ Get the preview of a document. Args: document (Union[str, UUID]): The ID or name of the document. repository (DocumentRepository): The repository for accessing document data. metadata_repository (DocumentMetadataRepository): The repository for accessing document metadata. user (TokenData): The user token data. Returns: FileResponse: The file response containing the document preview. Raises: HTTP_404: If the document ID or name is not provided or if the document does not exist. HTTP_400: If the file type is not supported for preview. 
""" if not document: raise http_404(msg="Enter document id or name.") try: get_document_metadata = dict( await metadata_repository.get(document=document, owner=user) ) return await repository.preview(document=get_document_metadata) except TypeError as e: raise http_404(msg="Document does not exists.") from e except ValueError as e: raise http_400(msg="File type is not supported for preview") from e ================================================ FILE: app/api/routes/documents/document_organization.py ================================================ from fastapi import APIRouter, Depends, status, Query from app.api.dependencies.repositories import get_repository from app.api.dependencies.auth_utils import get_current_user from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.db.repositories.documents.document_organization import DocumentOrgRepository from app.schemas.auth.bands import TokenData router = APIRouter(tags=["Document Search"]) @router.get( "", # response_model=List[DocumentMetadataRead], status_code=status.HTTP_200_OK, name="search_document", ) async def search_document( limit: int = Query(default=10, lt=100), offset: int = Query(default=0), tag: str = None, category: str = None, file_types: str = None, doc_status: str = None, repository: DocumentOrgRepository = Depends(DocumentOrgRepository), repository_metadata: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ): """ Searches for documents based on specified criteria. Args: limit (int): The maximum number of documents to retrieve. Defaults to 10. offset (int): The number of documents to skip. Defaults to 0. tag (str, optional): The tag to filter documents by. Defaults to None. category (str, optional): The category to filter documents by. Defaults to None. file_types (str, optional): The file types to filter documents by. Defaults to None. doc_status (str, optional): The status of documents to filter by. Defaults to None. repository (DocumentOrgRepository): The repository for managing document organization. repository_metadata (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: List[DocumentMetadataRead] or List[Dict[str, Any]]: The list of matching documents. 
""" doc_list = await repository_metadata.doc_list( limit=limit, offset=offset, owner=user ) doc_list = doc_list[f"documents of {user.username}"] if tag is None and category is None and file_types is None and doc_status is None: return doc_list return await repository.search_doc( docs=doc_list, tags=tag, categories=category, file_types=file_types, status=doc_status, ) ================================================ FILE: app/api/routes/documents/document_sharing.py ================================================ from typing import Union from uuid import UUID from fastapi import APIRouter, Depends, status from fastapi.responses import RedirectResponse from app.api.dependencies.auth_utils import get_current_user from app.api.dependencies.repositories import get_repository, get_key from app.core.exceptions import http_404 from app.db.repositories.auth.auth import AuthRepository from app.db.repositories.documents.documents import DocumentRepository from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.db.repositories.documents.document_sharing import DocumentSharingRepository from app.db.repositories.documents.notify import NotifyRepo from app.schemas.auth.bands import TokenData from app.schemas.documents.document_sharing import SharingRequest router = APIRouter(tags=["Document Sharing"]) @router.post( "/share-link/{document}", status_code=status.HTTP_200_OK, name="share_document_link" ) async def share_link_document( document: Union[str, UUID], share_request: SharingRequest, repository: DocumentSharingRepository = Depends( get_repository(DocumentSharingRepository) ), auth_repository: AuthRepository = Depends(get_repository(AuthRepository)), metadata_repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), notify_repository: NotifyRepo = Depends(get_repository(NotifyRepo)), user: TokenData = Depends(get_current_user), ): """ Shares a document link with another user, sends mail and notifies the receiver. Args: document (Union[str, UUID]): The ID or name of the document to be shared. share_request (SharingRequest): The sharing request containing the details of the sharing operation. repository (DocumentSharingRepository): The repository for managing document sharing. auth_repository (AuthRepository): The repository for managing User-related queries. metadata_repository (DocumentMetadataRepository): The repository for managing document metadata. notify_repository (NotifyRepo): The repository for managing notification user (TokenData): The token data of the authenticated user. Returns: Dict[str, str]: A dictionary containing the personal URL and shareable link. Raises: HTTP_404: If no document with the specified ID or name is found. 
""" try: doc = await metadata_repository.get(document=document, owner=user) visits = share_request.visits share_to = share_request.share_to pre_signed_url = await repository.get_presigned_url(doc=doc.__dict__) shareable_link = await repository.get_shareable_link( owner_id=user.id, url=pre_signed_url, visits=visits, filename=doc.__dict__["name"], share_to=share_to, ) if len(share_to) > 0: # Send email to the receiver await repository.send_mail(user=user, mail_to=share_to, link=shareable_link) # send a notification to the receiver await notify_repository.notify( user=user, receivers=share_to, filename=doc.__dict__["name"], auth_repo=auth_repository, ) return {"personal_url": pre_signed_url, "share_this": shareable_link} except KeyError as e: raise http_404(msg=f"No doc: {document}") from e @router.get("/doc/{url_id}", tags=["Document Sharing"]) async def redirect_to_share( url_id: str, repository: DocumentSharingRepository = Depends( get_repository(DocumentSharingRepository) ), user: TokenData = Depends(get_current_user), ): """ Redirects to a shared document URL. Args: url_id (str): The ID of the shared document URL. repository (DocumentSharingRepository): The repository for managing document sharing. user (TokenData): The token data of the authenticated user. Returns: RedirectResponse: A redirect response to the shared document URL. """ if await repository.confirm_access(user=user, url_id=url_id): redirect_url = await repository.get_redirect_url(url_id=url_id) return RedirectResponse(redirect_url) @router.post("/share/{document}", status_code=status.HTTP_200_OK, name="share_document") async def share_document( document: Union[str, UUID], share_request: SharingRequest, notify: bool = True, repository: DocumentSharingRepository = Depends( get_repository(DocumentSharingRepository) ), document_repo: DocumentRepository = Depends(DocumentRepository), metadata_repo: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), notify_repo: NotifyRepo = Depends(get_repository(NotifyRepo)), auth_repo: AuthRepository = Depends(get_repository(AuthRepository)), user: TokenData = Depends(get_current_user), ) -> None: """ Share a document with other users, and notifies if notify is set to True (default). Args: document (Union[str, UUID]): The ID or UUID of the document to be shared. share_request (SharingRequest): The sharing request containing the recipients and permissions. notify (bool, optional): Whether to send notifications to the recipients. Defaults to True. repository (DocumentSharingRepository, optional): The repository for document sharing operations. document_repo (DocumentRepository, optional): The repository for document operations. metadata_repo (DocumentMetadataRepository, optional): The repository for document metadata operations. notify_repo (NotifyRepo, optional): The repository for notification operations. auth_repo (AuthRepository, optional): The repository for authentication operations. user (TokenData, optional): The authenticated user. Raises: HTTP_404: If the document is not found. 
Returns: None """ if not document: raise http_404(msg="Enter document id or UUID.") try: get_document_metadata = dict( await metadata_repo.get(document=document, owner=user) ) key = await get_key(s3_url=get_document_metadata["s3_url"]) file = await document_repo.get_s3_file_object_body(key=key) return await repository.share_document( filename=get_document_metadata["name"], document_key=key, file=file, share_request=share_request, notify=notify, owner=user, notify_repo=notify_repo, auth_repo=auth_repo, ) except Exception as e: raise http_404() from e ================================================ FILE: app/api/routes/documents/documents_metadata.py ================================================ from typing import Any, Dict, List, Union from uuid import UUID from fastapi import APIRouter, status, Body, Depends, Query, HTTPException from app.api.dependencies.repositories import get_repository from app.api.dependencies.auth_utils import get_current_user from app.core.exceptions import http_404 from app.db.repositories.auth.auth import AuthRepository from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import DocumentMetadataPatch from app.schemas.documents.documents_metadata import ( DocumentMetadataCreate, DocumentMetadataRead, ) router = APIRouter(tags=["Document MetaData"]) @router.post( "/upload", response_model=DocumentMetadataRead, status_code=status.HTTP_201_CREATED, name="upload_documents_metadata", ) async def upload_document_metadata( document_upload: DocumentMetadataCreate = Body(...), repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: """ Uploads document metadata. Args: document_upload (DocumentMetadataCreate): The document metadata to be uploaded. repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: DocumentMetadataRead: The uploaded document metadata. """ document_upload.owner_id = user.id return await repository.upload(document_upload=document_upload) @router.get( "", response_model=Dict[str, Union[List[DocumentMetadataRead], Any]], status_code=status.HTTP_200_OK, name="get_documents_metadata", ) async def get_documents_metadata( limit: int = Query(default=10, lt=100), offset: int = Query(default=0), repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> Dict[str, Union[List[DocumentMetadataRead], Any]]: """ Retrieves a list of document metadata. Args: limit (int): The maximum number of documents to retrieve. Defaults to 10. offset (int): The number of documents to skip. Defaults to 0. repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: Dict[str, Union[List[DocumentMetadataRead], Any]]: A dictionary containing the list of document metadata. 
""" return await repository.doc_list(limit=limit, offset=offset, owner=user) @router.get( "/{document}/detail", response_model=None, status_code=status.HTTP_200_OK, name="get_document-metadata", ) async def get_document_metadata( document: Union[str, UUID], repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> Union[DocumentMetadataRead, HTTPException]: """ Retrieves the metadata of a specific document. Args: document (Union[str, UUID]): The ID or name of the document. repository (DocumentMetadataRepository): The repository for managing document metadata. user (TokenData): The token data of the authenticated user. Returns: Union[DocumentMetadataRead, HTTPException]: The document metadata if found, otherwise an HTTPException. """ return await repository.get(document=document, owner=user) @router.put( "/{document}", response_model=None, status_code=status.HTTP_200_OK, name="update_doc_metadata_details", ) async def update_doc_metadata_details( document: Union[str, UUID], document_patch: DocumentMetadataPatch = Body(...), repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user_repository: AuthRepository = Depends(get_repository(AuthRepository)), user: TokenData = Depends(get_current_user), ) -> Union[DocumentMetadataRead, HTTPException]: """ Updates the details of a document's metadata. Args: document (Union[str, UUID]): The ID or name of the document. document_patch (DocumentMetadataPatch): The document metadata patch containing the updated details. repository (DocumentMetadataRepository): The repository for managing document metadata. user_repository (AuthRepository): The repository for managing user authentication. user (TokenData): The token data of the authenticated user. Returns: Union[DocumentMetadataRead, HTTPException]: The updated document metadata if successful, otherwise an HTTPException. Raises: HTTP_404: If no document with the specified ID or name is found. """ try: await repository.get(document=document, owner=user) except Exception as e: raise http_404(msg=f"No Document with: {document}") from e return await repository.patch( document=document, document_patch=document_patch, owner=user, user_repo=user_repository, is_owner=True, ) @router.delete( "/{document}", status_code=status.HTTP_204_NO_CONTENT, name="delete_document_metadata", ) async def delete_document_metadata( document: Union[str, UUID], repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> None: """ Deletes the metadata of a document and moves it to the bin. Args: document (Union[str, UUID]): The identifier of the document to delete. repository (DocumentMetadataRepository): The repository for accessing document metadata. Defaults to the result of the `get_repository` function with `DocumentMetadataRepository` as the argument. user (TokenData): The token data of the current user. Defaults to the result of the `get_current_user` function. Returns: None (204_NO_CONTENT) Raises: HTTP_404: If no document with the specified identifier is found. 
""" try: await repository.get(document=document, owner=user) except Exception as e: raise http_404(msg=f"No document with the detail: {document}.") from e return await repository.delete(document=document, owner=user) # Archiving @router.post( "/archive/{file_name}", response_model=DocumentMetadataRead, status_code=status.HTTP_200_OK, name="archive_a_document", ) async def archive( file_name: str, repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: """ Archive a document. Args: file_name (str): The name of the file to be archived. repository (DocumentMetadataRepository): The repository for document metadata. user (TokenData): The user token data. Returns: DocumentMetadataRead: The archived document metadata. """ return await repository.archive(file=file_name, user=user) @router.get( "/archive/list", response_model=None, status_code=status.HTTP_200_OK, name="archived_doc_list", ) async def archive_list( repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> Dict[str, List[str] | int]: """ Get the list of archived documents. Args: repository (DocumentMetadataRepository): The repository for document metadata. user (TokenData): The user token data. Returns: Dict[str, List[str] | int]: A dictionary containing the list of archived documents. """ return await repository.archive_list(user=user) @router.post( "/un-archive/{file}", response_model=DocumentMetadataRead, status_code=status.HTTP_200_OK, name="remove_doc_from_archive", ) async def un_archive( file: str, repository: DocumentMetadataRepository = Depends( get_repository(DocumentMetadataRepository) ), user: TokenData = Depends(get_current_user), ) -> DocumentMetadataRead: """ Un-archive a document. Args: file (str): The name of the file to be un-archived. repository (DocumentMetadataRepository): The repository for document metadata. user (TokenData): The user token data. Returns: DocumentMetadataRead: The un-archived document metadata. """ return await repository.un_archive(file=file, user=user) ================================================ FILE: app/api/routes/documents/notify.py ================================================ from typing import List, Union from uuid import UUID from fastapi import APIRouter, status, Depends from app.api.dependencies.auth_utils import get_current_user from app.api.dependencies.repositories import get_repository from app.core.exceptions import http_404 from app.db.repositories.documents.notify import NotifyRepo from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import Notification, NotifyPatchStatus router = APIRouter(tags=["Notification"]) @router.get("", status_code=status.HTTP_200_OK, name="get_notifications") async def get_notifications( repository: NotifyRepo = Depends(get_repository(NotifyRepo)), user: TokenData = Depends(get_current_user), ) -> List[Notification]: """ Get notifications for a user. Args: repository (NotifyRepo): The repository for accessing notification data. user (TokenData): The authenticated user. Returns: List[Notification]: A list of notifications for the user. 
""" return await repository.get_notifications(user=user) @router.put( path="/{notification_id}", status_code=status.HTTP_200_OK, name="patch_status", ) async def patch_status( updated_status: NotifyPatchStatus = None, notification_id: UUID = None, repository: NotifyRepo = Depends(get_repository(NotifyRepo)), user: TokenData = Depends(get_current_user), ) -> Union[List[Notification], Notification]: """ Patch the status of a notification or mark all notifications as read. Args: updated_status (NotifyPatchStatus, optional): The updated status for the notification. Defaults to None. notification_id (UUID, optional): The ID of the notification to update. Defaults to None. repository (NotifyRepo): The repository for accessing notification data. user (TokenData): The authenticated user. Returns: Union[List[Notification], Notification]: If `mark_as_all_read` is True, returns a list of all notifications marked as read. If `notification_id` is provided, returns the updated notification. Otherwise, raises an HTTP_404 exception. Raises: HTTP_404: If 'notification_id' is not provided and update_status.mark_all is set to False. """ if updated_status.mark_all: return await repository.mark_all_read(user=user) if notification_id: return await repository.update_status( n_id=notification_id, updated_status=updated_status, user=user ) raise http_404( msg="Bad Request: Make sure to either flag mark_all " "or enter notification_id along with correct status as payload." ) @router.delete( path="", status_code=status.HTTP_204_NO_CONTENT, name="clear_all_notifications", ) async def clear_all_notifications( repository: NotifyRepo = Depends(get_repository(NotifyRepo)), user: TokenData = Depends(get_current_user), ) -> None: """ Clear all notifications for a user. Args: repository (NotifyRepo): The repository for accessing notification data. user (TokenData): The authenticated user. Returns: None """ return await repository.clear_notification(user=user) ================================================ FILE: app/core/__init__.py ================================================ ================================================ FILE: app/core/config.py ================================================ import os from typing import Optional from dotenv import load_dotenv from pydantic_settings import BaseSettings load_dotenv() class GlobalConfig(BaseSettings): """ Global Configuration for the FastAPI application. 
""" title: str = os.environ.get("TITLE", "DocFlow") version: str = "1.0.0" description: str = os.environ.get("DESCRIPTION", "Document Management API") host_url: str = "http://localhost:8000" docs_url: str = "/docs" redoc_url: str = "/redoc" openapi_url: str = "/openapi.json" api_prefix: str = "/v2" debug: bool = str(os.environ.get("DEBUG", "False")).lower() == "true" postgres_user: str = os.environ.get("POSTGRES_USER", "") postgres_password: str = os.environ.get("POSTGRES_PASSWORD", "") postgres_hostname: str = os.environ.get("DATABASE_HOSTNAME", "") postgres_port: int = int(os.environ.get("POSTGRES_PORT", "5432")) postgres_db: str = os.environ.get("POSTGRES_DB", "") # s3 / minio configurations aws_access_key_id: str = os.environ.get("AWS_ACCESS_KEY_ID", "") aws_secret_key: str = os.environ.get("AWS_SECRET_ACCESS_KEY", "") aws_region: str = os.environ.get("AWS_REGION", "us-east-1") # minio doesn't care about a region s3_endpoint_url: Optional[str] = os.environ.get("S3_ENDPOINT_URL") or None s3_bucket: str = os.environ.get("S3_BUCKET", "") s3_test_bucket: Optional[str] = os.environ.get("S3_TEST_BUCKET") or None # user config access_token_expire_min: int = int(os.environ.get("ACCESS_TOKEN_EXPIRE_MIN", "30")) refresh_token_expire_min: int = int(os.environ.get("REFRESH_TOKEN_EXPIRE_MIN", "1440")) algorithm: str = os.environ.get("ALGORITHM", "HS256") jwt_secret_key: str = os.environ.get("JWT_SECRET_KEY", "") jwt_refresh_secret_key: str = os.environ.get("JWT_REFRESH_SECRET_KEY", "") # Email Service smtp_server: str = os.environ.get("SMTP_SERVER", "") smtp_port: int = int(os.environ.get("SMTP_PORT", "587")) email: str = os.environ.get("EMAIL", "") app_pw: str = os.environ.get("APP_PASSWORD", "") @property def db_echo_log(self) -> bool: return self.debug @property def sync_database_url(self) -> str: return ( f"postgresql://{self.postgres_user}:{self.postgres_password}@" f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}" ) @property def async_database_url(self) -> str: return ( f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@" f"{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}" ) settings = GlobalConfig() ================================================ FILE: app/core/exceptions.py ================================================ from typing import Dict from fastapi.exceptions import HTTPException from starlette import status def http_400(msg: str = "Bad Request...") -> HTTPException: """Invalid Input""" return HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=msg) def http_401( msg: str = "Unauthorized", headers: Dict[str, str] = None ) -> HTTPException: """Unauthorized Access""" return HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=msg, headers=headers ) def http_403(msg: str = "Forbidden") -> HTTPException: """Forbidden access""" return HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=msg) def http_404(msg: str = "Entity does not exists...") -> HTTPException: """Raised when entity was not found on database.""" return HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=msg) def http_409(msg: str = "Entity already exists...") -> HTTPException: """Raised when entity already exists on database.""" return HTTPException(status_code=status.HTTP_409_CONFLICT, detail=msg) def http_500(msg: str = "Internal Server Error") -> HTTPException: """Raised when error caused due to internal server""" return HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=msg) 
================================================ FILE: app/db/__init__.py ================================================ ================================================ FILE: app/db/models.py ================================================ import logging from sqlalchemy import create_engine from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, Session from app.core.config import settings from app.core.exceptions import http_500 logger = logging.getLogger("sqlalchemy") engine = create_engine( url=settings.sync_database_url, echo=settings.db_echo_log, ) async_engine = create_async_engine( url=settings.async_database_url, echo=settings.db_echo_log, query_cache_size=0, ) session = sessionmaker(bind=engine, autocommit=False, autoflush=False) async_session = sessionmaker( bind=async_engine, class_=AsyncSession, autocommit=False, autoflush=False, expire_on_commit=False, ) Base = declarative_base() metadata = Base.metadata async def check_tables(): try: with Session(engine) as _session: # Create tables metadata.create_all(engine) _session.commit() logger.info("Tables created if they didn't already exist.") except OperationalError as e: logger.error("Error Creating table: %s", e) raise http_500(msg="An error occurred while creating tables.") from e ================================================ FILE: app/db/repositories/__init__.py ================================================ ================================================ FILE: app/db/repositories/auth/__init__.py ================================================ ================================================ FILE: app/db/repositories/auth/auth.py ================================================ from typing import Any, Coroutine from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from app.api.dependencies.auth_utils import ( get_hashed_password, verify_password, create_access_token, create_refresh_token, ) from app.core.exceptions import http_400, http_403 from app.db.tables.auth.auth import User from app.schemas.auth.bands import UserOut, UserAuth class AuthRepository: def __init__(self, session: AsyncSession) -> None: self.session = session async def _check_user_or_none( self, userdata: UserAuth ) -> Coroutine[Any, Any, Any | None]: stmt = select(User).where( User.username == userdata.username or User.email == userdata.email ) result = await self.session.execute(stmt) return result.scalar_one_or_none() async def get_user(self, field: str, detail: str): stmt = "" if field == "username": stmt = select(User).where(User.username == detail) elif field == "email": stmt = select(User).where(User.email == detail) result = await self.session.execute(stmt) return result.scalar_one_or_none() async def signup(self, userdata: UserAuth) -> UserOut: # Checking if the user already exists if await self._check_user_or_none(userdata) is not None: raise http_400(msg="User with details already exists") # hashing the password hashed_password = get_hashed_password(password=userdata.password) userdata.password = hashed_password new_user = User(**userdata.model_dump()) self.session.add(new_user) await self.session.commit() await self.session.refresh(new_user) return new_user async def login(self, ipdata): user = await self.get_user(field="username", detail=ipdata.username) if user is None: raise http_403(msg="Recheck the credentials") user = user.__dict__ hashed_password = 
user.get("password") if not verify_password( password=ipdata.password, hashed_password=hashed_password ): raise http_403("Incorrect Password") return { "token_type": "bearer", "access_token": create_access_token( subject={"id": user.get("id"), "username": user.get("username")} ), "refresh_token": create_refresh_token( subject={"id": user.get("id"), "username": user.get("username")} ), } ================================================ FILE: app/db/repositories/documents/__init__.py ================================================ ================================================ FILE: app/db/repositories/documents/document_organization.py ================================================ from typing import Any, Dict, List, Union from app.api.dependencies.constants import SUPPORTED_FILE_TYPES from app.schemas.documents.documents_metadata import DocumentMetadataRead class DocumentOrgRepository: """ Repository for managing document organization. """ def __init__(self): ... @staticmethod async def _search_tags( docs: List[DocumentMetadataRead], tags: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ result.extend( doc for tag in tags if doc["tags"] and "".join(tag.split()) in doc["tags"] ) return result or None @staticmethod async def _search_category( docs: List[DocumentMetadataRead], categories: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ result.extend( doc for category in categories if doc["categories"] and "".join(category.split()) in doc["categories"] ) return result or None @staticmethod async def _search_file_type( docs: List[DocumentMetadataRead], file_types: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ for ftype in file_types: ftype = "".join(ftype.split()) result.extend( doc for key, val in SUPPORTED_FILE_TYPES.items() if val == ftype and key == doc["file_type"] ) return result or None @staticmethod async def _search_by_status( docs: List[DocumentMetadataRead], status: List[str] ) -> List[Dict[str, str]]: result = [] for doc in docs: doc = doc.__dict__ result.extend( doc for stat in status if str(doc["status"]) == f"StatusEnum.{stat}" ) return result or None async def search_doc( self, docs: List[DocumentMetadataRead], tags: str, categories: str, file_types: str, status: str, ) -> Union[List[List[Dict[str, Any]]], None]: results = [] if tags: tags = tags.split(",") results.append(await self._search_tags(docs=docs, tags=tags)) if categories: categories = categories.split(",") results.append( await self._search_category(docs=docs, categories=categories) ) if file_types: file_type = file_types.split(",") results.append( await self._search_file_type(docs=docs, file_types=file_type) ) if status: _status = status.split(",") results.append(await self._search_by_status(docs=docs, status=_status)) return results ================================================ FILE: app/db/repositories/documents/document_sharing.py ================================================ import asyncio import os import secrets import tempfile from datetime import datetime, timedelta, timezone from typing import Dict, Any, Union, List import boto3 from botocore.exceptions import NoCredentialsError from sqlalchemy import select, update, delete from sqlalchemy.ext.asyncio import AsyncSession from app.api.dependencies.mail_service import mail_service from app.api.dependencies.repositories import get_key from app.core.config import settings from app.core.exceptions import http_404, http_500 from 
app.db.tables.auth.auth import User from app.db.tables.documents.document_sharing import DocumentSharing from app.db.repositories.auth.auth import AuthRepository from app.db.repositories.documents.notify import NotifyRepo from app.logs.logger import docflow_logger from app.schemas.auth.bands import TokenData from app.schemas.documents.document_sharing import SharingRequest class DocumentSharingRepository: """ Repository for managing document sharing. """ def __init__(self, session: AsyncSession) -> None: boto3_config = { "aws_access_key_id": settings.aws_access_key_id, "aws_secret_access_key": settings.aws_secret_key, "region_name": settings.aws_region, } if settings.s3_endpoint_url: boto3_config["endpoint_url"] = settings.s3_endpoint_url self.client = boto3.client("s3", **boto3_config) self.session = session async def get_user_mail(self, user: TokenData): stmt = select(User).where(User.id == user.id) execute = await self.session.execute(stmt) return execute.scalar_one_or_none().__dict__["email"] @staticmethod async def _generate_id() -> str: return secrets.token_urlsafe(8) async def _get_saved_links(self, filename: str) -> Dict[str, Any]: stmt = select(DocumentSharing).where(DocumentSharing.filename == filename) result = await self.session.execute(stmt) return result.scalar_one_or_none() async def update_visits(self, filename: str, visits_left: int): if visits_left > 1: await self.session.execute( update(DocumentSharing) .where(DocumentSharing.filename == filename) .values(visits=visits_left - 1) ) elif visits_left == 1: await self.session.execute( delete(DocumentSharing).where(DocumentSharing.filename == filename) ) await self.session.commit() async def cleanup_expired_links(self): now = datetime.now(timezone.utc) stmt = delete(DocumentSharing).where(DocumentSharing.expires_at <= now) try: await self.session.execute(stmt) except Exception as e: raise http_500() from e async def get_presigned_url( self, doc: Dict[str, Any] ) -> Union[str, Dict[str, str]]: try: params = { "Bucket": settings.s3_bucket, "Key": await get_key(s3_url=doc["s3_url"]), } response = await asyncio.to_thread( self.client.generate_presigned_url, "get_object", Params=params, ExpiresIn=3600, ) except NoCredentialsError as e: return {"error": f"Invalid AWS Credentials: {e}"} return response async def get_shareable_link( self, owner_id: str, url: str, visits: int, filename: str, share_to: List[str] ): # task to clean uo the database for expired links await self.cleanup_expired_links() if ans := await self._get_saved_links(filename=filename): ans = ans.__dict__ return { "note": f"Links already shared... 
valid Till {ans['expires_at']}", "response": { "shareable_link": f"{settings.host_url}{settings.api_prefix}/doc/{ans['url_id']}", "visits_left": ans["visits"], }, } url_id = await self._generate_id() share_entry = DocumentSharing( url_id=url_id, owner_id=owner_id, filename=filename, url=url, expires_at=datetime.now(timezone.utc) + timedelta(seconds=3599), visits=visits, share_to=share_to, ) try: self.session.add(share_entry) await self.session.commit() await self.session.refresh(share_entry) response = share_entry.__dict__ return { "shareable_link": f"{settings.host_url}{settings.api_prefix}/doc/{response['url_id']}", "visits": response["visits"], } except Exception as e: raise http_500() from e async def get_redirect_url(self, url_id: str): stmt = select(DocumentSharing).where(DocumentSharing.url_id == url_id) result = await self.session.execute(stmt) try: result = result.scalar_one_or_none().__dict__ await self.update_visits( filename=result["filename"], visits_left=result["visits"] ) return result["url"] except AttributeError as e: raise http_404( msg="Shared URL link either expired or reached the limit of visits..." ) from e async def send_mail( self, user: TokenData, mail_to: Union[List[str], None], link: str ) -> None: if mail_to: user_mail = await self.get_user_mail(user) subj = f"DocFlow: {user.username} share a document" content = f""" Visit the link: {link}, to access the document shared by {user.username} | {user_mail}. """ for mails in mail_to: mail_service( mail_to=mails, subject=subj, content=content, file_path=None ) async def confirm_access(self, user: TokenData, url_id: str | None) -> bool: # check if login user is owner or to whom it is shared stmt = select(DocumentSharing).where(DocumentSharing.url_id == url_id) result = await self.session.execute(stmt) try: result = result.scalar_one_or_none().__dict__ user_mail = await self.get_user_mail(user) return ( result.get("owner_id") == user.id or user_mail in result.get("share_to") or user.username in result.get("share_to") ) except Exception as e: raise http_404(msg="The link has expired...") from e async def share_document( self, filename: str, document_key: str, file: Any, share_request: SharingRequest, notify: bool, owner: TokenData, notify_repo: NotifyRepo, auth_repo: AuthRepository, ) -> None: user_mail = await self.get_user_mail(owner) share_to = share_request.share_to # Determining extension _, extension = os.path.splitext(document_key) # Creating temp file to share; delete=False so the file exists when mail_service reads it temp = tempfile.NamedTemporaryFile(delete=False, suffix=extension) try: temp.write(file) temp.close() temp_path = temp.name subject = f"{owner.username} shared a file with you using DocFlow" for mails in share_to: content = f""" Hello {mails}! Hope you are well? {owner.username} | {user_mail} shared a file with you as an attachment. 
Message: {share_request.message} Regards, DocFlow """ mail_service( mail_to=mails, subject=subject, content=content, file_path=temp_path ) finally: os.unlink(temp_path) if notify: return await notify_repo.notify( user=owner, receivers=share_to, filename=filename, auth_repo=auth_repo ) return None ================================================ FILE: app/db/repositories/documents/documents.py ================================================ import asyncio import hashlib import os import tempfile from typing import Dict, Any import boto3 from botocore.exceptions import ClientError from fastapi import File from starlette.responses import FileResponse from app.api.dependencies.constants import SUPPORTED_FILE_TYPES from app.api.dependencies.repositories import TempFileResponse, get_key, get_s3_url from app.core.config import settings from app.core.exceptions import http_400, http_404 from app.db.repositories.documents.documents_metadata import DocumentMetadataRepository from app.logs.logger import docflow_logger from app.schemas.auth.bands import TokenData def _build_boto3_config() -> dict: config = { "aws_access_key_id": settings.aws_access_key_id, "aws_secret_access_key": settings.aws_secret_key, "region_name": settings.aws_region, } if settings.s3_endpoint_url: config["endpoint_url"] = settings.s3_endpoint_url return config _boto3_config = _build_boto3_config() _s3_resource = boto3.resource("s3", **_boto3_config) _s3_client = boto3.client("s3", **_boto3_config) _s3_bucket = _s3_resource.Bucket(settings.s3_bucket) try: _s3_client.put_bucket_versioning( Bucket=settings.s3_bucket, VersioningConfiguration={"Status": "Enabled"} ) except Exception: # MinIO does not support versioning in all configurations pass async def perm_delete( file: str, delete_all: bool, meta_repo: DocumentMetadataRepository, user: TokenData ) -> None: if delete_all: await meta_repo.empty_bin(owner=user) else: doc = await meta_repo.bin_list(owner=user) for docs in doc.get("response"): if docs.DocumentMetadata.name == file: doc_id = docs.DocumentMetadata.id await meta_repo.perm_delete_a_doc(document=doc_id, owner=user) class DocumentRepository: def __init__(self): self.s3_client = _s3_resource self.client = _s3_client self.s3_bucket = _s3_bucket @staticmethod async def _calculate_file_hash(file: File) -> str: file.file.seek(0) contents = file.file.read() file.file.seek(0) return hashlib.sha256(contents).hexdigest() async def get_s3_file_object_body(self, key: str): def _get(): obj = self.client.get_object(Bucket=settings.s3_bucket, Key=key) return obj["Body"].read() return await asyncio.to_thread(_get) async def _delete_object(self, key: str) -> None: await asyncio.to_thread( self.client.delete_object, Bucket=settings.s3_bucket, Key=key ) async def _upload_new_file( self, file: File, folder: str, contents, file_type: str, user: TokenData ) -> Dict[str, Any]: from ulid import ULID if folder is None: key = f"{user.id}/{str(ULID())}.{SUPPORTED_FILE_TYPES[file_type]}" else: key = f"{user.id}/{folder}/{str(ULID())}.{SUPPORTED_FILE_TYPES[file_type]}" await asyncio.to_thread(self.s3_bucket.put_object, Key=key, Body=contents) return { "response": "file_added", "upload": { "owner_id": user.id, "name": file.filename, "s3_url": await get_s3_url(key=key), "size": len(contents), "file_type": file_type, "file_hash": await self._calculate_file_hash(file=file), }, } async def _upload_new_version( self, doc: dict, file: File, contents, file_type: str, new_file_hash: str, is_owner: bool, ) -> Dict[str, Any]: key = await 
get_key(s3_url=doc["s3_url"]) await asyncio.to_thread(self.s3_bucket.put_object, Key=key, Body=contents) return { "response": "file_updated", "is_owner": is_owner, "upload": { "name": file.filename, "s3_url": await get_s3_url(key=key), "size": len(contents), "file_type": file_type, "file_hash": new_file_hash, }, } async def upload( self, metadata_repo, user_repo, file: File, folder: str, user: TokenData ) -> Dict[str, Any]: """ Uploads a file to the specified folder in the document repository. Args: metadata_repo: The repository for accessing metadata. user_repo: The repository for accessing user information. file: The file to be uploaded. folder: The folder in which the file should be uploaded. user: The token data of the user. Returns: @return: A dictionary containing the response and upload information. Raises: HTTP_400: If the file type is not supported. """ file_type = file.content_type if file_type not in SUPPORTED_FILE_TYPES: raise http_400(msg=f"File type {file_type} not supported.") contents = await file.read() doc = (await metadata_repo.get(document=file.filename, owner=user)).__dict__ new_file_hash: str = await self._calculate_file_hash(file=file) if "status_code" in doc.keys(): # getting document irrespective of user if get_doc := await metadata_repo.get_doc(filename=file.filename): get_doc = get_doc.__dict__ # Check if logged-in user has update access logged_in_user = ( await user_repo.get_user(field="username", detail=user.username) ).__dict__ if (get_doc["access_to"] is not None) and logged_in_user[ "email" ] in get_doc["access_to"]: if get_doc["file_hash"] != new_file_hash: docflow_logger.info( f"User has update access to file owned by: {get_doc['owner_id']}" ) return await self._upload_new_version( doc=get_doc, file=file, contents=contents, file_type=file_type, new_file_hash=await self._calculate_file_hash(file=file), is_owner=False, ) else: return await self._upload_new_file( file=file, folder=folder, contents=contents, file_type=file_type, user=user, ) return await self._upload_new_file( file=file, folder=folder, contents=contents, file_type=file_type, user=user, ) docflow_logger.info( f"File {file.filename} already present, checking for updates..." 
) if doc["file_hash"] != new_file_hash: docflow_logger.info("File has been updated, uploading new version...") return await self._upload_new_version( doc=doc, file=file, contents=contents, file_type=file_type, new_file_hash=new_file_hash, is_owner=True, ) return { "response": "File already present and no changes detected.", "upload": "Nothing to update...", } async def download(self, s3_url: str, name: str) -> Dict[str, str]: key = get_key(s3_url=s3_url) try: await asyncio.to_thread( self.s3_client.meta.client.download_file, settings.s3_bucket, await key, r"/app/downloads/docflow_" + f"{name}", ) except ClientError as e: raise http_404(msg=f"File not found: {e}") from e return {"message": f"successfully downloaded {name} in downloads folder."} async def preview(self, document: Dict[str, Any]) -> FileResponse: key = await get_key(s3_url=document["s3_url"]) file = await self.get_s3_file_object_body(key) _, extension = os.path.splitext(key) ext = extension.lower() if ext in [".jpg", ".jpeg", ".png", ".gif"]: media_type = "image/" + ext.lstrip(".") elif ext == ".pdf": media_type = "application/pdf" elif ext == ".json": media_type = "application/json" elif ext == ".xml": media_type = "application/xml" elif ext == ".txt": media_type = "text/plain" else: raise ValueError("Unsupported file type.") with tempfile.NamedTemporaryFile(delete=False, suffix=extension) as temp: temp.write(file) temp_path = temp.name return TempFileResponse(temp_path, media_type=media_type) ================================================ FILE: app/db/repositories/documents/documents_metadata.py ================================================ from datetime import datetime, timezone, timedelta from typing import Any, Dict, List, Union from uuid import UUID from fastapi import HTTPException from sqlalchemy import select, update, insert, delete from sqlalchemy.engine import Row from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import aliased from app.core.exceptions import http_409, http_404 from app.db.repositories.auth.auth import AuthRepository from app.db.tables.documents.documents_metadata import DocumentMetadata, doc_user_access from app.db.tables.base_class import StatusEnum from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import DocumentMetadataPatch from app.schemas.documents.documents_metadata import ( DocumentMetadataCreate, DocumentMetadataRead, ) class DocumentMetadataRepository: def __init__(self, session: AsyncSession) -> None: self.session = session self.doc_cls = aliased(DocumentMetadata, name="doc_cls") async def _get_instance(self, document: Union[str, UUID], owner: TokenData): try: UUID(str(document)) stmt = ( select(self.doc_cls) .where(self.doc_cls.owner_id == owner.id) .where(self.doc_cls.id == document) .where(self.doc_cls.status != StatusEnum.deleted) ) except ValueError: stmt = ( select(self.doc_cls) .where(self.doc_cls.owner_id == owner.id) .where(self.doc_cls.name == document) .where(self.doc_cls.status != StatusEnum.deleted) ) result = await self.session.execute(stmt) return result.scalar_one_or_none() @staticmethod async def _extract_changes(document_patch: DocumentMetadataPatch) -> dict: if isinstance(document_patch, dict): return document_patch return document_patch.model_dump(exclude_unset=True) async def _execute_update( self, db_document: DocumentMetadata | Dict[str, Any], changes: dict ) -> None: if isinstance(db_document, dict): stmt = ( update(DocumentMetadata) .where(DocumentMetadata.id == 
db_document.get("id")) .values(changes) ) doc_name = db_document.get("name") else: stmt = ( update(DocumentMetadata) .where(DocumentMetadata.id == db_document.id) .values(changes) ) doc_name = db_document.name try: await self.session.execute(stmt) except Exception as e: raise http_409(msg=f"Error while updating document: {doc_name}") from e async def _update_access_and_permission(self, db_document, changes, user_repo): access_given_to = changes.get("access_to", []) # if access_to has email ids, update doc_user_access table with doc_id and user_id for user_email in access_given_to: try: user_id = ( await user_repo.get_user(field="email", detail=user_email) ).__dict__["id"] # update doc_user_access table with doc_id and user_id await self._update_doc_user_access(db_document, user_id) except IntegrityError as e: raise http_409(msg=f"User '{user_email}' already has access...") from e except AttributeError as e: raise http_404( msg=f"The user with '{user_email}' does not exists, make sure user has account in DocFlow." ) from e async def _update_doc_user_access(self, db_document, user_id): stmt = insert(doc_user_access).values( doc_id=db_document.__dict__["id"], user_id=user_id ) await self.session.execute(stmt) await self.session.commit() async def _delete_access(self, document) -> None: await self.session.execute( doc_user_access.delete().where(doc_user_access.c.doc_id == document.id) ) async def _auto_delete(self, bin_items: List) -> bool: now = datetime.now(timezone.utc) deleted_any = False for doc in bin_items: if doc.deleted_at is not None and doc.deleted_at <= now: stmt = delete(DocumentMetadata).where(DocumentMetadata.id == doc.id) await self.session.execute(stmt) deleted_any = True if deleted_any: await self.session.commit() return deleted_any async def get_doc(self, filename: str) -> Dict[str, Any]: """ Get document by filename irrespective of logged-in user. Args: self: The instance of the class. filename (str): The name of the document. Returns: Dict[str, Any]: The document metadata. 
""" stmt = ( select(DocumentMetadata) .where(DocumentMetadata.name == filename) .where(self.doc_cls.status != StatusEnum.deleted) ) result = await self.session.execute(stmt) return result.scalar_one_or_none() async def upload( self, document_upload: DocumentMetadataCreate ) -> DocumentMetadataRead: if not isinstance(document_upload, dict): db_document = DocumentMetadata(**document_upload.model_dump()) else: db_document = DocumentMetadata(**document_upload) try: self.session.add(db_document) await self.session.commit() await self.session.refresh(db_document) except IntegrityError as e: raise http_404( msg=f"Document with name: {document_upload.name} already exists.", ) from e return DocumentMetadataRead(**db_document.__dict__) async def doc_list( self, owner: TokenData, limit: int = 10, offset: int = 0 ) -> Dict[str, Union[List[DocumentMetadataRead], Any]]: stmt = ( select(self.doc_cls) .join(DocumentMetadata, DocumentMetadata.id == self.doc_cls.id) .where(DocumentMetadata.owner_id == owner.id) .where(DocumentMetadata.status != StatusEnum.deleted) .offset(offset) .limit(limit) ) try: result = await self.session.execute(stmt) result_list = result.fetchall() for row in result_list: row.doc_cls.__dict__.pop("_sa_instance_state", None) result = [ DocumentMetadataRead(**row.doc_cls.__dict__) for row in result_list ] return {"response": result, "no_of_docs": len(result)} except Exception as e: raise http_404(msg="No Documents found") from e async def get( self, document: Union[str, UUID], owner: TokenData ) -> Union[DocumentMetadataRead, HTTPException]: db_document = await self._get_instance(document=document, owner=owner) if db_document is None: return http_409(msg=f"No Document with {document}") return DocumentMetadataRead(**db_document.__dict__) async def patch( self, document: Union[str, UUID], document_patch: DocumentMetadataPatch, owner: TokenData, user_repo: AuthRepository, is_owner: bool, ) -> Union[DocumentMetadataRead, HTTPException]: if is_owner: db_document = await self._get_instance(document=document, owner=owner) changes = await self._extract_changes(document_patch) await self._update_access_and_permission(db_document, changes, user_repo) await self._execute_update(db_document, changes) else: # This condition will be activated when, the new version of file is added by a privileged member # here privileged member is one who have access to update the document. 
db_document = await self.get_doc(filename=document) changes = await self._extract_changes(document_patch) if changes: await self._execute_update(db_document, changes) return DocumentMetadataRead(**db_document.__dict__) async def delete(self, document: Union[str, UUID], owner: TokenData) -> None: try: db_document = await self._get_instance(document=document, owner=owner) setattr(db_document, "status", StatusEnum.deleted) setattr(db_document, "tags", None) setattr(db_document, "access_to", None) setattr(db_document, "file_type", None) setattr(db_document, "categories", None) setattr( db_document, "deleted_at", datetime.now(timezone.utc) + timedelta(days=30), ) # delete entry from doc_user_access table await self._delete_access(document=db_document) self.session.add(db_document) await self.session.commit() except Exception as e: raise http_404(msg=f"No file with {document}") from e async def bin_list(self, owner: TokenData) -> Dict[str, List[Row | Row] | int]: stmt = ( select(DocumentMetadata) .where(DocumentMetadata.owner_id == owner.id) .where(DocumentMetadata.status == StatusEnum.deleted) ) result = (await self.session.execute(stmt)).scalars().all() if await self._auto_delete(result): result = (await self.session.execute(stmt)).scalars().all() serialized = [] for doc in result: d = {k: v for k, v in doc.__dict__.items() if k != "_sa_instance_state"} serialized.append(DocumentMetadataRead(**d)) return {"response": serialized, "no_of_docs": len(serialized)} async def restore(self, file: str, owner: TokenData) -> DocumentMetadataRead: stmt = ( select(DocumentMetadata) .where(DocumentMetadata.owner_id == owner.id) .where(DocumentMetadata.name == file) .where(DocumentMetadata.status == StatusEnum.deleted) ) db_doc = (await self.session.execute(stmt)).scalar_one_or_none() if db_doc is None: raise http_404(msg=f"'{file}' not found in trash") await self._execute_update(db_document=db_doc, changes={"status": StatusEnum.private, "deleted_at": None}) await self.session.commit() d = {k: v for k, v in db_doc.__dict__.items() if k != "_sa_instance_state"} return DocumentMetadataRead(**d) async def perm_delete_a_doc(self, document: UUID | None, owner: TokenData) -> None: stmt = ( delete(DocumentMetadata) .where(DocumentMetadata.owner_id == owner.id) .where(DocumentMetadata.id == document) .where(DocumentMetadata.status == StatusEnum.deleted) ) await self.session.execute(stmt) async def empty_bin(self, owner: TokenData): stmt = ( delete(DocumentMetadata) .where(DocumentMetadata.owner_id == owner.id) .where(DocumentMetadata.status == StatusEnum.deleted) ) await self.session.execute(stmt) async def archive(self, file: str, user: TokenData): doc = await self._get_instance(document=file, owner=user) if doc and doc.status == StatusEnum.archived: raise http_409(msg="Doc is already archived") if doc is None: raise http_404(msg="Doc does not exist") await self._execute_update(db_document=doc, changes={"status": StatusEnum.archived}) await self.session.commit() d = {k: v for k, v in doc.__dict__.items() if k != "_sa_instance_state"} return DocumentMetadataRead(**d) async def archive_list(self, user: TokenData) -> Dict[str, List[str] | int]: stmt = ( select(DocumentMetadata) .where(DocumentMetadata.owner_id == user.id) .where(DocumentMetadata.status == StatusEnum.archived) ) result = (await self.session.execute(stmt)).scalars().all() serialized = [ DocumentMetadataRead(**{k: v for k, v in doc.__dict__.items() if k != "_sa_instance_state"}) for doc in result ] return {"response": serialized, "no_of_docs": 
len(serialized)} async def un_archive(self, file: str, user: TokenData) -> DocumentMetadataRead: doc = await self._get_instance(document=file, owner=user) if doc and doc.status == StatusEnum.archived: change = {"status": "private"} await self._execute_update(db_document=doc, changes=change) return DocumentMetadataRead(**doc.__dict__) if doc and doc.status != StatusEnum.archived: raise http_409(msg="Doc is not archived") raise http_404(msg="Doc does not exits") ================================================ FILE: app/db/repositories/documents/notify.py ================================================ from typing import List from uuid import UUID from sqlalchemy import select, update, delete from sqlalchemy.ext.asyncio import AsyncSession from app.core.exceptions import http_500, http_409, http_404 from app.db.repositories.auth.auth import AuthRepository from app.db.tables.base_class import NotifyEnum from app.db.tables.documents.notify import Notify from app.schemas.auth.bands import TokenData from app.schemas.documents.bands import Notification, NotifyPatchStatus class NotifyRepo: def __init__(self, session: AsyncSession) -> None: self.session = session async def notify( self, user: TokenData, receivers: List[str], filename: str, auth_repo: AuthRepository, ) -> None: """ Notify users about a shared file. Args: user (TokenData): The authenticated user who shared the file. receivers (List[str]): The list of email addresses of the users to be notified. filename (str): The name of the shared file. auth_repo (AuthRepository): The repository for accessing user authentication data. Returns: None Raises: HTTP_500: If an error occurs while adding the notification entry. """ for receiver in receivers: receiver_details = await auth_repo.get_user(field="email", detail=receiver) try: notify_entry = Notify( receiver_id=receiver_details.__dict__["id"], message=f"{user.username} shared {filename} with you! Access the shared file via mail...", status=NotifyEnum.unread, ) try: self.session.add(notify_entry) await self.session.commit() await self.session.refresh(notify_entry) except Exception as e: raise http_500( msg="Error notifying the user, but the mail has been sent successfully." ) from e except Exception as e: raise http_404( msg="The user does not exists, make sure the user has an account on docflow..." ) from e async def get_notification_by_id(self, n_id: UUID, user: TokenData) -> Notification: """ Get a notification by its ID for a specific user. Args: n_id (UUID): The ID of the notification. user (TokenData): The authenticated user. Returns: Notification: The notification object. Raises: HTTP_404: If no notification with the given ID is found. """ stmt = select(Notify).where(Notify.receiver_id == user.id and Notify.id == n_id) try: result = (await self.session.execute(stmt)).scalar_one_or_none() return Notification(**result.__dict__) except Exception as e: raise http_404(msg=f"No notification with id: {n_id}") from e async def get_notifications(self, user: TokenData) -> List[Notification]: """ Get all notifications for a specific user. Args: user (TokenData): The authenticated user. Returns: List[Notification]: A list of notification objects. """ stmt = select(Notify).where(Notify.receiver_id == user.id) notifications = (await self.session.execute(stmt)).fetchall() return [ Notification(**notification.Notify.__dict__) for notification in notifications ] async def mark_all_read(self, user: TokenData) -> List[Notification]: """ Mark all notifications as read for a specific user. 
Args: user (TokenData): The authenticated user. Returns: List[Notification]: A list of notification objects that have been marked as read. Raises: HTTP_409: If an error occurs while updating the notification status. """ stmt = ( update(Notify) .where(Notify.receiver_id == user.id and Notify.status != NotifyEnum.read) .values({Notify.status: NotifyEnum.read}) ) try: await self.session.execute(stmt) return await self.get_notifications(user=user) except Exception as e: raise http_409(msg="Error updating marking notification read...") from e async def update_status( self, n_id: UUID, updated_status: NotifyPatchStatus, user: TokenData ): """ Update the status of a notification for a specific user. Args: n_id (UUID): The ID of the notification to update. updated_status (NotifyPatchStatus): The updated status for the notification. user (TokenData): The authenticated user. Returns: Notification: The updated notification object. Raises: HTTP_409: If an error occurs while updating the notification status. """ stmt = ( update(Notify) .where( Notify.receiver_id == user.id and Notify.id == n_id and Notify.status != updated_status.status ) .values({Notify.status: updated_status.status}) ) try: await self.session.execute(stmt) return await self.get_notification_by_id(n_id=n_id, user=user) except Exception as e: raise http_409(msg="Error updating notification status...") from e async def clear_notification(self, user: TokenData) -> None: """ Clear all notifications for a specific user. Args: user (TokenData): The authenticated user. Returns: None Raises: Exception: If an error occurs while clearing the notifications. """ stmt = delete(Notify).where(Notify.receiver_id == user.id) try: await self.session.execute(stmt) except Exception as e: raise e ================================================ FILE: app/db/tables/__init__.py ================================================ ================================================ FILE: app/db/tables/auth/__init__.py ================================================ ================================================ FILE: app/db/tables/auth/auth.py ================================================ from sqlalchemy import Column, String, Text, TIMESTAMP from sqlalchemy.orm import relationship from sqlalchemy.sql.expression import text from app.api.dependencies.repositories import get_ulid from app.db.models import Base class User(Base): __tablename__ = "users" id = Column( String(26), primary_key=True, default=get_ulid, unique=True, index=True, nullable=False, ) username: str = Column(String, unique=True, nullable=False) email = Column(String, unique=True, nullable=False) password = Column(Text, nullable=False) user_since = Column( TIMESTAMP(timezone=True), nullable=False, server_default=text("now()") ) owner_of = relationship("DocumentMetadata", back_populates="owner") ================================================ FILE: app/db/tables/base_class.py ================================================ import enum class StatusEnum(enum.Enum): """ Enum for status of document """ public = "public" private = "private" shared = "shared" deleted = "deleted" archived = "archived" class NotifyEnum(enum.Enum): """ Enum for status of notification """ read = "read" unread = "unread" @classmethod def has_value(cls, value): return value in cls._value2member_map_ ================================================ FILE: app/db/tables/documents/__init__.py ================================================ ================================================ FILE: 
app/db/tables/documents/document_sharing.py ================================================ from typing import List, Optional from datetime import datetime, timezone from sqlalchemy import Column, Integer, String, DateTime, ARRAY, ForeignKey from sqlalchemy.orm import Mapped from app.db.models import Base class DocumentSharing(Base): __tablename__ = "share_url" url_id: str = Column(String, primary_key=True, nullable=False, unique=True) filename: str = Column(String, unique=True, nullable=False) owner_id: Mapped[str] = Column(String, ForeignKey("users.id"), nullable=False) url: str = Column(String, unique=True) expires_at = Column( DateTime(timezone=True), default=datetime.now(timezone.utc), ) visits: int = Column(Integer) share_to: Optional[List[str]] = Column(ARRAY(String)) ================================================ FILE: app/db/tables/documents/documents_metadata.py ================================================ from datetime import datetime, timezone from uuid import uuid4 from typing import List, Optional from sqlalchemy import ( Column, String, Integer, ARRAY, text, DateTime, Enum, ForeignKey, Table, UniqueConstraint, ) from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import Mapped, relationship from app.db.models import Base from app.db.tables.base_class import StatusEnum doc_user_access = Table( "doc_user_access", Base.metadata, Column( "doc_id", UUID(as_uuid=True), ForeignKey("document_metadata.id", ondelete="CASCADE"), ), Column("user_id", String(26), ForeignKey("users.id")), UniqueConstraint("doc_id", "user_id", name="uq_doc_user_access_doc_user"), ) class DocumentMetadata(Base): __tablename__ = "document_metadata" id: UUID = Column( UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False ) owner_id: Mapped[str] = Column(String, ForeignKey("users.id"), nullable=False) name: str = Column(String) s3_url: str = Column(String, unique=True) created_at = Column( DateTime(timezone=True), default=datetime.now(timezone.utc), nullable=False, server_default=text("NOW()"), ) size: Optional[int] = Column(Integer) file_type: Optional[str] = Column(String) tags: Optional[List[str]] = Column(ARRAY(String)) categories: Optional[List[str]] = Column(ARRAY(String)) status: Enum = Column(Enum(StatusEnum), default=StatusEnum.private) file_hash: Optional[str] = Column(String) access_to: Optional[List[str]] = Column(ARRAY(String)) deleted_at = Column(DateTime(timezone=True), nullable=True) update_access = relationship( "User", secondary=doc_user_access, passive_deletes=True ) owner = relationship("User", back_populates="owner_of") ================================================ FILE: app/db/tables/documents/notify.py ================================================ from datetime import datetime, timezone from uuid import uuid4 from sqlalchemy import Column, String, Text, Enum, DateTime, text from sqlalchemy.dialects.postgresql import UUID from app.db.tables.base_class import NotifyEnum from app.db.models import Base class Notify(Base): __tablename__ = "notify" id: UUID = Column( UUID(as_uuid=True), default=uuid4, primary_key=True, index=True, nullable=False ) receiver_id: str = Column(String, nullable=False) message: str = Column(Text, nullable=False) status: NotifyEnum = Column(Enum(NotifyEnum), default=NotifyEnum.unread) notified_at = Column( DateTime(timezone=True), default=datetime.now(timezone.utc), nullable=False, server_default=text("NOW()"), ) ================================================ FILE: 
app/docs/DocFlow-DocumentManagementAPI.postman_collection.json ================================================ { "info": { "_postman_id": "fb26f538-c78a-4354-a392-7d77e3bf78ad", "name": "DocFlow - Document Management API", "description": "DocFlow is a powerful Document Management API designed to streamline document handling, including seamless uploading, downloading, organization, versioning, sharing, and more.\n\n## 🚀 Key Features\n\n💡 Document Upload and Download \n💡 Organization and Searching \n💡 Versioning \n💡 Sharing \n💡 Authentication and Authorization \n💡 Access Control List \n💡 Deletion and Archiving \n💡 Document Preview \n💡 Send file via Email \n💡 Conversion to PDF \n💡 Full Text Search \n💡 Text Analysis and OCR\n\n## 🐙 Githib\n\nLink: [https://www.github.com/jiisanda/docflow](https://www.github.com/jiisanda/docflow)\n\n## 🛟 Help and Suppor\n\nFor any questions or support, please [contact](https://harshjaiswal2307@gmail.com).\n\nEnjoy using DocFlow to manage your documents seamlessly!", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", "_exporter_id": "20984268" }, "item": [ { "name": "🦕 api", "item": [ { "name": "👤 user", "item": [ { "name": "🆕👤 Create new user", "request": { "method": "POST", "header": [ { "key": "Content-Type", "value": "application/json" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"username\": \"\",\n \"email\": \"\",\n \"password\": \"\"\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/u/signup", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "signup" ] } }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"username\": \"\",\n \"email\": \"\",\n \"password\": \"\"\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/u/signup", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "signup" ] } }, "status": "Created", "code": 201, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"id\": \"\",\n \"email\": \"\",\n \"user_since\": \"\"\n}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"username\": \"\",\n \"email\": \"\",\n \"password\": \"\"\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/u/signup", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "signup" ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🔑♻️ Create access and refresh tokens for user", "request": { "method": "POST", "header": [ { "key": "Content-Type", "value": "application/x-www-form-urlencoded" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "urlencoded", "urlencoded": [ { "key": "username", "value": "", "description": "(Required) " }, { "key": "password", "value": "", "description": "(Required) " }, { "key": "grant_type", "value": "" }, { "key": "scope", "value": "" }, { "key": "client_id", "value": "" }, 
{ "key": "client_secret", "value": "" } ] }, "url": { "raw": "{{baseUrl}}/v2/u/login", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "login" ] } }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "body": { "mode": "urlencoded", "urlencoded": [ { "key": "username", "value": "", "description": "(Required) " }, { "key": "password", "value": "", "description": "(Required) " }, { "key": "grant_type", "value": "" }, { "key": "scope", "value": "" }, { "key": "client_id", "value": "" }, { "key": "client_secret", "value": "" } ] }, "url": { "raw": "{{baseUrl}}/v2/u/login", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "login" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "body": { "mode": "urlencoded", "urlencoded": [ { "key": "username", "value": "", "description": "(Required) " }, { "key": "password", "value": "", "description": "(Required) " }, { "key": "grant_type", "value": "" }, { "key": "scope", "value": "" }, { "key": "client_id", "value": "" }, { "key": "client_secret", "value": "" } ] }, "url": { "raw": "{{baseUrl}}/v2/u/login", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "login" ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "👤 Get details of currently logged in user", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/u/me", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "me" ] }, "description": "~TODO: Add an extra db for user profile then return details\n For now returning {Userid and Username}" }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/u/me", "host": [ "{{baseUrl}}" ], "path": [ "v2", "u", "me" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"id\": \"\",\n \"username\": \"\"\n}" } ] } ] }, { "name": "📝 Document", "item": [ { "name": "📤 Upload Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "POST", "header": [ { "key": "Content-Type", "value": "multipart/form-data" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "formdata", "formdata": [ { "key": "files", "description": "(Required) ", "type": "file", "src": [] } ] }, "url": { "raw": 
"{{baseUrl}}/v2/upload", "host": [ "{{baseUrl}}" ], "path": [ "v2", "upload" ], "query": [ { "key": "folder", "value": "", "disabled": true } ] }, "description": "Uploads a document to the specified folder.\n\nArgs:\n file (UploadFile): The file to be uploaded.\n folder (Optional[str]): The folder where the document will be stored. Defaults to None.\n repository (DocumentRepository): The repository for managing documents.\n metadata_repository (DocumentMetadataRepository): The repository for managing document metadata.\n user_repository (AuthRepository): The repository for managing user authentication.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n Union[DocumentMetadataRead, Dict[str, str]]: If the file is added, returns the uploaded document metadata.\n If the file is updated, returns the patched document metadata.\n Otherwise, returns a response dictionary.\n\nRaises:\n HTTP_400: If no input file is provided." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "formdata", "formdata": [ { "key": "file", "value": "", "description": "(Required) ", "type": "text" } ] }, "url": { "raw": "{{baseUrl}}/v2/upload?folder=", "host": [ "{{baseUrl}}" ], "path": [ "v2", "upload" ], "query": [ { "key": "folder", "value": "" } ] } }, "status": "Created", "code": 201, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "formdata", "formdata": [ { "key": "file", "value": "", "description": "(Required) ", "type": "text" } ] }, "url": { "raw": "{{baseUrl}}/v2/upload?folder=", "host": [ "{{baseUrl}}" ], "path": [ "v2", "upload" ], "query": [ { "key": "folder", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "📥 Download Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/file/:file_name/download", "host": [ "{{baseUrl}}" ], "path": [ "v2", "file", ":file_name", "download" ], "variable": [ { "key": "file_name", "value": "" } ] }, "description": "Downloads a document with the specified file name.\n\nArgs:\n file_name (str): The name of the file to be downloaded.\n repository (DocumentRepository): The repository for managing documents.\n metadata_repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n object: The downloaded document.\n\nRaises:\n HTTP_400: If no file name is 
provided.\n HTTP_404: If no file with the specified name is found." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/:file_name/download", "host": [ "{{baseUrl}}" ], "path": [ "v2", ":file_name", "download" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/:file_name/download", "host": [ "{{baseUrl}}" ], "path": [ "v2", ":file_name", "download" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🚮 Add To Bin", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "DELETE", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] }, "description": "Adds a document to the bin for deletion.\n\nArgs:\n file_name (str): The name of the file to be added to the bin.\n metadata_repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n None: If the file is added to the bin." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" } ], "url": { "raw": "{{baseUrl}}/v2/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "No Content", "code": 204, "_postman_previewlanguage": "text", "header": [ { "key": "Content-Type", "value": "text/plain" } ], "cookie": [], "body": "" }, { "name": "Validation Error", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🗑️💥 Permanently Delete Doc", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "DELETE", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/trash/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] }, "description": "Permanently deletes a document.\n\nArgs:\n file_name (str, optional): The name of the file to be permanently deleted. Defaults to None.\n delete_all (bool): Flag indicating whether to delete all documents in the bin. Defaults to False.\n repository (DocumentRepository): The repository for managing documents.\n metadata_repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n None: If the file is permanently deleted.\n\nRaises:\n HTTP_404: If no file with the specified name is found." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" } ], "url": { "raw": "{{baseUrl}}/v2/trash/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "No Content", "code": 204, "_postman_previewlanguage": "text", "header": [ { "key": "Content-Type", "value": "text/plain" } ], "cookie": [], "body": "" }, { "name": "Validation Error", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/trash/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "👀 Preview Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "addTokenTo", "value": "header", "type": "string" } ] }, "method": "GET", "header": [], "url": { "raw": "{{baseUrl}}/v2/preview/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "preview", ":document" ], "variable": [ { "key": "document", "value": "article1.png" } ] }, "description": "```\nGet the preview of a document.\nArgs:\n document (Union[str, UUID]): The ID or name of the document.\n repository (DocumentRe\npository): The repository for accessing document data.\n metadata_repository (DocumentMetadataRepository): The repository for accessing document metadata.\n user (TokenData): The user token data.\nReturns:\n FileResponse: The file response containing the document preview.\nRaises:\n HTTP_404: If the document ID or name is not provided or if the document does not exist.\n HTTP_400: If the file type is not supported for preview.\n\n ```" }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "key": "Authorization", "value": "{{token}}", "type": "text" } ], "url": { "raw": "{{baseUrl}}/v2/preview/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "preview", ":document" ], "variable": [ { "key": "document", "value": "article1.png" } ] } }, "_postman_previewlanguage": null, "header": null, "cookie": [], "body": null } ] }, { "name": "📃🗑️ List Of Bin", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/trash", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash" ] }, "description": "List bin.\n\nArgs:\n repository: The document metadata repository.\n owner: The token data of the owner.\n\nReturns:\n Dict[str, List[Row | Row] | int]: The list of bin." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/trash", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" } ] }, { "name": "🗑️🔄️ Restore From Bin", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/restore/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "restore", ":file" ], "variable": [ { "key": "file", "value": "" } ] }, "description": "Restore bin.\n\nArgs:\n file: The file to restore.\n repository: The document metadata repository.\n user: The token data of the user.\n\nReturns:\n DocumentMetadataRead: The restored document metadata." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/restore/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "restore", ":file" ], "variable": [ { "key": "file", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/restore/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "restore", ":file" ], "variable": [ { "key": "file", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🫗 Empty bin", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "addTokenTo", "value": "header", "type": "string" } ] }, "method": "DELETE", "header": [], "url": { "raw": "{{baseUrl}}/v2/trash", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash" ] } }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "DELETE", "header": [ { "key": "Authorization", "value": "{{token}}", "type": "text" } ], "url": { "raw": "{{baseUrl}}/v2/trash", "host": [ "{{baseUrl}}" ], "path": [ "v2", "trash" ] } }, "status": "No Content", "code": 204, "_postman_previewlanguage": null, "header": null, "cookie": 
[], "body": null } ] } ] }, { "name": "🔔 notifications", "item": [ { "name": "📫 Get Notifications", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/notifications", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications" ] }, "description": "Get notifications for a user.\n\nArgs: \nrepository (NotifyRepo): The repository for accessing notification data. \nuser (TokenData): The authenticated user.\n\nReturns: \nList\\[Notification\\]: A list of notifications for the user." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/notifications", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "[\n {\n \"id\": \"\",\n \"receiver_id\": \"\",\n \"message\": \"\",\n \"status\": \"\",\n \"notified_at\": \"\"\n },\n {\n \"id\": \"\",\n \"receiver_id\": \"\",\n \"message\": \"\",\n \"status\": \"\",\n \"notified_at\": \"\"\n }\n]" } ] }, { "name": "🧩🔔 Patch Status", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "PUT", "header": [ { "key": "Content-Type", "value": "application/json" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"status\": \"unread\",\n \"mark_all\": false\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/notifications/:notification_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications", ":notification_id" ], "variable": [ { "key": "notification_id", "value": "" } ] }, "description": "Patch the status of a notification or mark all notifications as read.\n\nArgs:\n updated_status (NotifyPatchStatus, optional): The updated status for the notification. Defaults to None.\n mark_as_all_read (bool, optional): Flag indicating whether to mark all notifications as read. Defaults to False.\n notification_id (UUID, optional): The ID of the notification to update. Defaults to None.\n repository (NotifyRepo): The repository for accessing notification data.\n user (TokenData): The authenticated user.\n\nReturns:\n Union[List[Notification], Notification]: If `mark_as_all_read` is True, returns a list of all notifications\n marked as read. If `notification_id` is provided, returns the updated notification.\n Otherwise, raises an HTTP_404 exception.\n\nRaises:\n HTTP_404: If neither `mark_as_all_read` nor `notification_id` is provided." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "PUT", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"status\": \"unread\"\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/notifications/:notification_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications", ":notification_id" ], "variable": [ { "key": "notification_id", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "[\n {\n \"id\": \"\",\n \"receiver_id\": \"\",\n \"message\": \"\",\n \"status\": \"\",\n \"notified_at\": \"\"\n },\n {\n \"id\": \"\",\n \"receiver_id\": \"\",\n \"message\": \"\",\n \"status\": \"\",\n \"notified_at\": \"\"\n }\n]" }, { "name": "Validation Error", "originalRequest": { "method": "PUT", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"status\": \"unread\"\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/notifications/:notification_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications", ":notification_id" ], "variable": [ { "key": "notification_id", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🗑️ Clear All Notifications", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "DELETE", "header": [], "url": { "raw": "{{baseUrl}}/v2/notifications", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications" ] }, "description": "Clear all notifications for a user.\n\nArgs:\n repository (NotifyRepo): The repository for accessing notification data.\n user (TokenData): The authenticated user.\n\nReturns:\n None" }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" } ], "url": { "raw": "{{baseUrl}}/v2/notifications", "host": [ "{{baseUrl}}" ], "path": [ "v2", "notifications" ] } }, "status": "No Content", "code": 204, "_postman_previewlanguage": "text", "header": [ { "key": "Content-Type", "value": "text/plain" } ], "cookie": [], "body": "" } ] } ], "description": "Simple Notification System" }, { "name": "🏷️ Documents Metadata", "item": [ { "name": "📚 archive", "item": [ { "name": "📚📃 Archive A Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/archive/:file_name", "host": [ "{{baseUrl}}" ], 
"path": [ "v2", "metadata", "archive", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] }, "description": "Archive a document.\n\nArgs:\n file_name (str): The name of the file to be archived.\n repository (DocumentMetadataRepository): The repository for document metadata.\n user (TokenData): The user token data.\n\nReturns:\n DocumentMetadataRead: The archived document metadata." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/archive/:file_name", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "archive", ":file_name" ], "variable": [ { "key": "file_name", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/api/document-metadata/archive?file_name=", "host": [ "{{baseUrl}}" ], "path": [ "api", "document-metadata", "archive" ], "query": [ { "key": "file_name", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "📃📚 Archived Doc List", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/archive/list", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "archive", "list" ] }, "description": "Get the list of archived documents.\n\nArgs:\n repository (DocumentMetadataRepository): The repository for document metadata.\n user (TokenData): The user token data.\n\nReturns:\n Dict[str, List[str] | int]: A dictionary containing the list of archived documents." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/archive/list", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "archive", "list" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" } ] }, { "name": "🔄️📚 Remove Doc From Archive", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "POST", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/un-archive/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "un-archive", ":file" ], "variable": [ { "key": "file", "value": "" } ] }, "description": "Un-archive a document.\n\nArgs:\n file (str): The name of the file to be un-archived.\n repository (DocumentMetadataRepository): The repository for document metadata.\n user (TokenData): The user token data.\n\nReturns:\n DocumentMetadataRead: The un-archived document metadata." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/un-archive/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "un-archive", ":file" ], "variable": [ { "key": "file", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/un-archive/:file", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "un-archive", ":file" ], "variable": [ { "key": "file", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] } ] }, { "name": "📃🏷️ Get Documents Metadata", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata?limit=10&offset=0", "host": [ "{{baseUrl}}" ], "path": [ "v2", 
"metadata" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" } ] }, "description": "Retrieves a list of document metadata.\n\nArgs:\n limit (int): The maximum number of documents to retrieve. Defaults to 10.\n offset (int): The number of documents to skip. Defaults to 0.\n repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n Dict[str, Union[List[DocumentMetadataRead], Any]]: A dictionary containing the list of document metadata." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata?limit=10&offset=0", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"in_f\": [\n {\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n },\n {\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n }\n ],\n \"ex40\": [\n {\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n },\n {\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n }\n ]\n}" }, { "name": "Validation Error", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata?limit=10&offset=0", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "📤🏷️ Upload Documents Metadata", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": 
"POST", "header": [ { "key": "Content-Type", "value": "application/json" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"owner_id\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/upload", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "upload" ] }, "description": "Uploads document metadata.\n\nArgs:\n document_upload (DocumentMetadataCreate): The document metadata to be uploaded.\n repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n DocumentMetadataRead: The uploaded document metadata." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"owner_id\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/upload", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "upload" ] } }, "status": "Created", "code": 201, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"owner_id\": \"\",\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ],\n \"id\": \"\"\n}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"s3_url\": \"\",\n \"created_at\": \"\",\n \"size\": \"\",\n \"file_type\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"status\": \"\",\n \"file_hash\": \"\",\n \"owner_id\": \"\",\n \"access_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/upload", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", "upload" ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "📃 Get Document-Metadata", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": 
"GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document/detail", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document", "detail" ], "variable": [ { "key": "document", "value": "" } ] }, "description": "Retrieves the metadata of a specific document.\n\nArgs:\n document (Union[str, UUID]): The ID or name of the document.\n repository (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n Union[DocumentMetadataRead, HTTPException]: The document metadata if found, otherwise an HTTPException." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document/detail", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document", "detail" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document/detail", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document", "detail" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🧩📃 Update Doc Metadata Details", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "PUT", "header": [ { "key": "Content-Type", "value": "application/json" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"tags\": [\n \"\"\n ],\n \"categories\": [\n \"\"\n ],\n \"access_to\": [\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] }, "description": "Updates the details of a document's metadata.\n\nArgs:\n document (Union[str, UUID]): The ID or name of the document.\n document_patch (DocumentMetadataPatch): The document metadata patch containing the updated details.\n repository (DocumentMetadataRepository): The repository for managing document metadata.\n user_repository (AuthRepository): The repository for managing user authentication.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n Union[DocumentMetadataRead, HTTPException]: The updated document metadata if successful,\n otherwise an 
HTTPException.\n\nRaises:\n HTTP_404: If no document with the specified ID or name is found." }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "PUT", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"access_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "PUT", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"name\": \"\",\n \"tags\": [\n \"\",\n \"\"\n ],\n \"categories\": [\n \"\",\n \"\"\n ],\n \"access_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🗑️📃 Delete Document Metadata", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "DELETE", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] }, "description": "Deletes the metadata of a document and moves it to the bin.\n\nArgs:\n document (Union[str, UUID]): The identifier of the document to delete.\n repository (DocumentMetadataRepository): The repository for accessing document metadata.\n Defaults to the result of the `get_repository` function with `DocumentMetadataRepository` as the argument.\n user (TokenData): The token data of the current user. Defaults to the result of the `get_current_user` function.\n\nReturns:\n None (204_NO_CONTENT)\n\nRaises:\n HTTP_404: If no document with the specified identifier is found." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "No Content", "code": 204, "_postman_previewlanguage": "text", "header": [ { "key": "Content-Type", "value": "text/plain" } ], "cookie": [], "body": "" }, { "name": "Validation Error", "originalRequest": { "method": "DELETE", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/metadata/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "metadata", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] } ], "description": "Handles updating the databases" }, { "name": "📨 Sharing", "item": [ { "name": "📃📨🔗 Share Document Link", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "POST", "header": [ { "key": "Content-Type", "value": "application/json" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"visits\": 1,\n \"share_to\": [\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/share-link/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "share-link", ":document" ], "variable": [ { "key": "document", "value": "" } ] }, "description": "Shares a documents link with another user, sends a mail and notifies the receiver.\n\nArgs: \ndocument (Union\\[str, UUID\\]): The ID or name of the document to be shared. \nshare_request (SharingRequest): The sharing request containing the details of the sharing operation. \nrepository (DocumentSharingRepository): The repository for managing document sharing. \nauth_repository (AuthRepository): The repository for managing User related queries. \nmetadata_repository (DocumentMetadataRepository): The repository for managing document metadata. \nnotify_repository (NotifyRepo): The repository for managing notification \nuser (TokenData): The token data of the authenticated user.\n\nReturns: \nDict\\[str, str\\]: A dictionary containing the personal URL and shareable link.\n\nRaises: \nHTTP_404: If no document with the specified ID or name is found." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"visits\": 1,\n \"share_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/share-link/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "share-link", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "POST", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"visits\": 1,\n \"share_to\": [\n \"\",\n \"\"\n ]\n}", "options": { "raw": { "language": "json" } } }, "url": { "raw": "{{baseUrl}}/v2/share-link/:document", "host": [ "{{baseUrl}}" ], "path": [ "v2", "share-link", ":document" ], "variable": [ { "key": "document", "value": "", "description": "(Required) " } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "🔄️📤 Redirect To Share", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/doc/:url_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "doc", ":url_id" ], "variable": [ { "key": "url_id", "value": "", "description": "(Required) " } ] }, "description": "Redirects to a shared document URL.\n\nArgs:\n url_id (str): The ID of the shared document URL.\n repository (DocumentSharingRepository): The repository for managing document sharing.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n RedirectResponse: A redirect response to the shared document URL." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/doc/:url_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "doc", ":url_id" ], "variable": [ { "key": "url_id", "value": "", "description": "(Required) " } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/doc/:url_id", "host": [ "{{baseUrl}}" ], "path": [ "v2", "doc", ":url_id" ], "variable": [ { "key": "url_id", "value": "", "description": "(Required) " } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] }, { "name": "📃📨 Share Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "addTokenTo", "value": "header", "type": "string" } ] }, "method": "POST", "header": [ { "key": "accept", "value": "application/json" }, { "key": "Content-Type", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"visits\": 1,\n \"share_to\": [\n \"\"\n ]\n}" }, "url": { "raw": "{{baseUrl}}/v2/share/:document?notify=true", "host": [ "{{baseUrl}}" ], "path": [ "v2", "share", ":document" ], "query": [ { "key": "notify", "value": "true" } ], "variable": [ { "key": "document", "value": "" } ] }, "description": "```\nShare a document with other users, and notifies if notify is set to True (default).\nArgs: \n document (Union[str, UUID]): The ID or UUID of the document to be shared. \n share_request (SharingRequest): The sharing request containing the recipients and permissions. \n notify (bool, optional): Whether to send notifications to the recipients. Defaults to True. \n repository (DocumentSharingRepository, optional): The repository for document sharing operations. \n document_repo (DocumentRepository, optional): The repository for document operations. \n metadata_repo (DocumentMetadataRepository, optional): The repository for document metadata operations. \n notify_repo (NotifyRepo, optional): The repository for notification operations. \n auth_repo (AuthRepository, optional): The repository for authentication operations. 
\n user (TokenData, optional): The authenticated user.\nRaises: \n HTTP_404: If the document is not found.\nReturns: \n None\n\n ```" }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "POST", "header": [ { "key": "accept", "value": "application/json" }, { "key": "Content-Type", "value": "application/json" } ], "body": { "mode": "raw", "raw": "{\n \"visits\": 1,\n \"share_to\": [\n \"\",\n \"\"\n ]\n}" }, "url": { "raw": "{{baseUrl}}/v2/share/:document?notify=true", "host": [ "{{baseUrl}}" ], "path": [ "v2", "share", ":document" ], "query": [ { "key": "notify", "value": "true" } ], "variable": [ { "key": "document", "value": "" } ] } }, "_postman_previewlanguage": null, "header": null, "cookie": [], "body": null } ] } ], "description": "Sharing a Document" }, { "name": "🔍 Searching", "item": [ { "name": "🔎📃 Search Document", "request": { "auth": { "type": "oauth2", "oauth2": [ { "key": "accessTokenUrl", "value": "api/u/login", "type": "string" }, { "key": "grant_type", "value": "password_credentials", "type": "string" } ] }, "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/filter?limit=10&offset=0&tag=&category=&file_types=&doc_status=", "host": [ "{{baseUrl}}" ], "path": [ "v2", "filter" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" }, { "key": "tag", "value": "" }, { "key": "category", "value": "" }, { "key": "file_types", "value": "" }, { "key": "doc_status", "value": "" } ] }, "description": "Searches for documents based on specified criteria.\n\nArgs:\n limit (int): The maximum number of documents to retrieve. Defaults to 10.\n offset (int): The number of documents to skip. Defaults to 0.\n tag (str, optional): The tag to filter documents by. Defaults to None.\n category (str, optional): The category to filter documents by. Defaults to None.\n file_types (str, optional): The file types to filter documents by. Defaults to None.\n doc_status (str, optional): The status of documents to filter by. Defaults to None.\n repository (DocumentOrgRepository): The repository for managing document organization.\n repository_metadata (DocumentMetadataRepository): The repository for managing document metadata.\n user (TokenData): The token data of the authenticated user.\n\nReturns:\n List[DocumentMetadataRead] or List[Dict[str, Any]]: The list of matching documents." 
}, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/filter?limit=10&offset=0&tag=&category=&file_types=&doc_status=", "host": [ "{{baseUrl}}" ], "path": [ "v2", "filter" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" }, { "key": "tag", "value": "" }, { "key": "category", "value": "" }, { "key": "file_types", "value": "" }, { "key": "doc_status", "value": "" } ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" }, { "name": "Validation Error", "originalRequest": { "method": "GET", "header": [ { "description": "Added as a part of security scheme: oauth2", "key": "Authorization", "value": "{{token}}" }, { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/v2/filter?limit=10&offset=0&tag=&category=&file_types=&doc_status=", "host": [ "{{baseUrl}}" ], "path": [ "v2", "filter" ], "query": [ { "key": "limit", "value": "10" }, { "key": "offset", "value": "0" }, { "key": "tag", "value": "" }, { "key": "category", "value": "" }, { "key": "file_types", "value": "" }, { "key": "doc_status", "value": "" } ] } }, "status": "Unprocessable Entity (WebDAV) (RFC 4918)", "code": 422, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" } ] } ], "description": "Searching a document with tags, categories, file type and status..." } ], "description": "Contains all the APIs..." }, { "name": "Root", "request": { "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/", "host": [ "{{baseUrl}}" ], "path": [ "" ] } }, "response": [ { "name": "Successful Response", "originalRequest": { "method": "GET", "header": [ { "key": "Accept", "value": "application/json" } ], "url": { "raw": "{{baseUrl}}/", "host": [ "{{baseUrl}}" ], "path": [ "" ] } }, "status": "OK", "code": 200, "_postman_previewlanguage": "json", "header": [ { "key": "Content-Type", "value": "application/json" } ], "cookie": [], "body": "{}" } ] } ], "variable": [ { "key": "baseUrl", "value": "/", "type": "string" } ] } ================================================ FILE: app/docs/commands/docker.md ================================================ # Docker Commands Download the image from [DockerHub](https://hub.docker.com/r/jiisanda/docflow). OR ```commandline docker pull jiisanda/docflow ``` ### Get all containers ```commandline docker ps ``` ### Docker-compose up For development, following will by default reads two files, a `docker-compose.yml` and an optional `docker-compose.override.yml` file. ```commandline docker-compose up ``` For production, you can use the `-f` option to specify the files to use. 
```commandline
docker-compose -f docker-compose.yml -f docker-compose.prod.yml up
```

### docker-compose up in detached mode

```commandline
docker-compose up -d
```

### Building an image

Rebuild the image if there are changes in the code:

```commandline
docker-compose up --build
```

### Stopping/Deleting all containers

```commandline
docker-compose down
```

### To access databases created inside the docker container of docflow

```commandline
docker exec -it <container_name> psql -U <username>
```

================================================ FILE: app/docs/commands/postgres.md ================================================

# PostgreSQL commands

##### Connect to psql inside the docker container

```commandline
docker exec -it <container_name> psql -U <username>
```

##### List all databases

```commandline
postgres-# \l
```

##### Connect to a database

```commandline
postgres-# \c <database_name>
```

##### List tables in the database

```commandline
postgres-# \dt
```

================================================ FILE: app/docs/features/postman.md ================================================

# 👨‍💻 Postman Setup Guide

Explore DocFlow's API endpoints effortlessly using Postman! 🚀

> ## Currently, facing issues with the Postman online documentation!

You can import the collection directly into Postman using [Import for Postman](../DocFlow-DocumentManagementAPI.postman_collection.json).

---

To access the complete list of API endpoints, visit our [Postman Documentation](https://documenter.getpostman.com/view/20984268/2s9YRGxUcp).

## 📥 Importing the Collection

Follow these simple steps to import the Postman collection:

1. Click the link.
2. In the top right corner, click Run in Postman.
3. Select Postman for Windows.
4. Import the collection.

Now you're ready to dive into DocFlow's powerful APIs!

## 🌐 Set Up Environment Variable

Configure the environment variable `baseUrl` with the value `localhost:8000`. This variable is used as the base URL for your API requests.

## 🚀 Try It Out!

With the collection imported and the environment set up, explore DocFlow's features by testing the various endpoints and get hands-on experience with the power of DocFlow's API.

Happy testing! If you have any questions or need assistance, feel free to reach out. 📞

================================================ FILE: app/docs/features/preview.md ================================================

# Preview in DocFlow

Let's see how the preview feature of DocFlow works. 🚀

- 🎯 Endpoint: `GET /v2/preview/:document`
- ⚙️ Params: `{document: }`
- 🔐 Authorization: `Bearer `

➰ cURL:

```shell
curl --location 'localhost:8000/v2/preview/:document' \
--header 'Authorization: Bearer '
```

Preview relies on two important utilities: `FileResponse` from `fastapi.responses` and `NamedTemporaryFile` from `tempfile`. `FileResponse` is used to return files. `NamedTemporaryFile` is a function in Python's `tempfile` module that creates a temporary file with a unique name in the system's default location for temporary files. It returns a file-like object that can be used much like any other file object.

Here is a brief explanation of how it works:

- When we call `NamedTemporaryFile()`, it creates a new file in your system's temporary directory.
- The temporary file is opened in binary mode (`w+b`) by default, and it can be read from and written to like any other file object.
- The temporary file is deleted as soon as it is closed. This is controlled by the `delete` parameter, which is `True` by default. Cleaning up temporary files matters: if they are never deleted, they can fill up the server's storage.

In our case, we have set `delete=False`, and we have overridden `FileResponse` to add a `__del__` method that deletes the file once the response is sent (see: `api/dependencies/repositories`).
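A minimal sketch of this cleanup pattern (assuming FastAPI/Starlette; the class name `TempFileResponse` and the helper below are illustrative, not DocFlow's exact implementation):

```python
import os
import tempfile

from fastapi.responses import FileResponse


class TempFileResponse(FileResponse):
    # Illustrative sketch; not DocFlow's actual class (see api/dependencies/repositories).
    """FileResponse whose backing temp file is removed once the response object is garbage-collected."""

    def __del__(self) -> None:
        # Clean up the temporary file after the response has been sent.
        if os.path.exists(self.path):
            os.remove(self.path)


def preview_from_bytes(contents: bytes, media_type: str, suffix: str) -> TempFileResponse:
    # delete=False keeps the file on disk after the `with` block exits,
    # so FastAPI can still stream it when building the response.
    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as temp:
        temp.write(contents)
        temp_path = temp.name
    return TempFileResponse(temp_path, media_type=media_type)
```

Deleting in `__del__` ties the file's lifetime to the response object, so cleanup happens only after the response has been streamed to the client.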
The issue we were experiencing was that the temporary file was getting deleted as soon as it was closed, which happens when the `with` block is exited. We were getting the following error: `RuntimeError: File at path /tmp/tmpuc5ru1oh.png does not exist.` This is the expected behaviour when `delete=True` is set in `tempfile.NamedTemporaryFile`.

The following figure describes how the Preview in DocFlow works.

![preview-document](../imgs/document/document_preview.png)

DocFlow allows preview of only two media types, `image/` and `application/pdf`.

================================================ FILE: app/docs/features/sharing.md ================================================

# Document Sharing in DocFlow

Let's see how sharing happens in DocFlow. 🚀

For sharing, DocFlow has 3 endpoints:

1) **Share Document Link**: Shares a document as a link.
2) **Redirect To Share**: Redirects the shared link to the document.
3) **Share Document**: Shares a document.

### Share Document Link

- 🎯 Endpoint: `POST /v2/share-link/:document`
- ⚙️ Params: `Path Params=document`
- 📦 Payload: `{ "visits": 1, "share_to": [ "" ] }`
- 🔐 Authorization: `Bearer `

➰ cURL:

```shell
curl --location 'localhost:8000/v2/share-link/:document' \
--header 'Content-Type: application/json' \
--header 'Accept: application/json' \
--header 'Authorization: Bearer ' \
--data '{
    "visits": 1,
    "share_to": [
        ""
    ]
}'
```

Let's see how this endpoint works. The code for this endpoint is in [app/api/routes/document_sharing.py](https://github.com/jiisanda/docflow/blob/master/app/api/routes/documents/document_sharing.py) under `share_link_document()`. It takes in the following arguments:

```text
document (Union[str, UUID]): The ID or name of the document to be shared.
share_request (SharingRequest): The sharing request containing the details of the sharing operation.
repository (DocumentSharingRepository): The repository for managing document sharing.
auth_repository (AuthRepository): The repository for managing User related queries.
metadata_repository (DocumentMetadataRepository): The repository for managing document metadata.
notify_repository (NotifyRepo): The repository for managing notification
user (TokenData): The token data of the authenticated user.
```

And returns:

```json
{
    "personal_url": "",
    "share_this": ""
}
```

So when we share a document by link, the email/username the document is shared with (`share_to`) must hold an account on DocFlow, and we limit the link's usage with `visits`. `personal_url` is nothing but a `pre_signed_url` generated by AWS, and `share_this` is a shareable link of the form `/api/doc/` that has a fixed number of visits and can only be accessed by the users the owner of the file specified in `share_to`. This shared URL acts as a shortened URL to access the document in a controlled manner.
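To make the two returned links concrete, here is a small sketch of how they could be produced with `boto3`; the bucket/key parameters and the helper name are assumptions for illustration, not DocFlow's actual code:

```python
# Illustrative sketch; bucket, key, and helper name are assumptions, not DocFlow's implementation.
import secrets

import boto3

s3_client = boto3.client("s3")


def build_share_links(bucket: str, key: str, expires_in: int = 3600) -> dict:
    # personal_url: a time-limited pre-signed URL generated by AWS S3.
    personal_url = s3_client.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": key},
        ExpiresIn=expires_in,
    )
    # share_this: a short random id the API maps back to the document,
    # stored together with the allowed number of visits and the share_to list.
    url_id = secrets.token_urlsafe(8)
    return {
        "personal_url": personal_url,
        "share_this": f"/api/doc/{url_id}",
    }
```

The stored `url_id`, its remaining `visits`, and the `share_to` list are what allow the redirect endpoint described next to enforce the access limits.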
### Redirect to Share

- 🎯 Endpoint: `POST /v2/doc/:url_id`
- ⚙️ Params: `Path Params=url_id`
- 🔐 Authorization: `Bearer <token>`

➰ cURL:

```shell
curl --location 'localhost:8000/v2/doc/:url_id' \
--header 'Accept: application/json' \
--header 'Authorization: Bearer <token>'
```

The code for this endpoint is in [app/api/routes/document_sharing.py](https://github.com/jiisanda/docflow/blob/master/app/api/routes/documents/document_sharing.py), under `redirect_to_share()`. It takes in the following arguments:

```text
url_id (str): The ID of the shared document URL.
repository (DocumentSharingRepository): The repository for managing document sharing.
user (TokenData): The token data of the authenticated user.
```

And returns a `RedirectResponse()` to the file, which downloads the document.

Now when a user accesses the endpoint with a valid `url_id`, they are able to download the file. Once the number of clicks reaches the `visits` limit, the link is no longer valid.

### Share Document

- 🎯 Endpoint: `POST /v2/share/document?document=<document>&notify=true`
- ⚙️ Params: `document: <document>, notify: true`
- 🔐 Authorization: `Bearer <token>`

➰ cURL:

```shell
curl --location 'localhost:8000/v2/share/document?document=<document>&notify=true' \
--header 'accept: application/json' \
--header 'Content-Type: application/json' \
--header 'Authorization: Bearer <token>' \
--data '{
    "visits": 1,
    "share_to": [ "" ]
}'
```

The code for this endpoint can be reviewed in [app/api/routes/document_sharing.py](https://github.com/jiisanda/docflow/blob/master/app/api/routes/documents/document_sharing.py) under `share_document`. It takes the following arguments:

```text
document (Union[str, UUID]): The ID or UUID of the document to be shared.
share_request (SharingRequest): The sharing request containing the recipients and permissions.
notify (bool, optional): Whether to send notifications to the recipients. Defaults to True.
repository (DocumentSharingRepository, optional): The repository for document sharing operations.
document_repo (DocumentRepository, optional): The repository for document operations.
metadata_repo (DocumentMetadataRepository, optional): The repository for document metadata operations.
notify_repo (NotifyRepo, optional): The repository for notification operations.
auth_repo (AuthRepository, optional): The repository for authentication operations.
user (TokenData, optional): The authenticated user.
```

Here, we send the file to users via mail as an attachment, and it is not mandatory for the user we are sharing the file with to have an account on DocFlow.

Sharing as an attachment works by using `tempfile`, a Python library for creating temporary files. Here is a code snippet of how it's done.

```python
# Creating temp file to share
with tempfile.NamedTemporaryFile(delete=True, suffix=extension) as temp:
    temp.write(file)
    temp_path = temp.name

    subject = f"{owner.username} shared a file with you using DocFlow"
    for mails in share_to:
        content = f"""
        Hello {mails}!
        Hope you are well?
        {owner.username} | {user_mail} shared a file with you as an attachment.

        Regards,
        DocFlow
        """
        mail_service(mail_to=mails, subject=subject, content=content, file_path=temp_path)
```

So `mail_service()` is called for every mail ID the user enters in `share_to`. And this is how mailing in DocFlow works...
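The real `mail_service()` helper lives in `app/api/dependencies/mail_service.py`; purely as an illustration, a minimal SMTP send with an attachment could look like the hedged sketch below. It assumes the `SMTP_SERVER`, `SMTP_PORT`, `EMAIL` and `APP_PASSWORD` variables described in the setup guide, and it is not the actual implementation:

```python
import os
import smtplib
from email.message import EmailMessage


def send_mail_with_attachment(mail_to: str, subject: str, content: str, file_path: str) -> None:
    """Hedged sketch of an SMTP send with a file attachment (not DocFlow's mail_service())."""
    msg = EmailMessage()
    msg["From"] = os.environ["EMAIL"]
    msg["To"] = mail_to
    msg["Subject"] = subject
    msg.set_content(content)

    # Attach the temporary file created from the shared document.
    with open(file_path, "rb") as fh:
        msg.add_attachment(
            fh.read(),
            maintype="application",
            subtype="octet-stream",
            filename=os.path.basename(file_path),
        )

    with smtplib.SMTP(os.environ.get("SMTP_SERVER", "smtp.gmail.com"),
                      int(os.environ.get("SMTP_PORT", "587"))) as smtp:
        smtp.starttls()  # upgrade the connection to TLS before authenticating
        smtp.login(os.environ["EMAIL"], os.environ["APP_PASSWORD"])
        smtp.send_message(msg)
```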
***

================================================
FILE: app/docs/features/upload.md
================================================

# Document Upload in DocFlow

- 🎯 Endpoint: `POST /v2/upload`
- ⚙️ Params: `{ folder: <folder>, file=@"/path/to/file" }`
- 🔐 Authorization: `Bearer <token>`

➰ cURL:

```commandline
curl --location 'localhost:8000/v2/upload?folder=<folder>' \
--header 'Content-Type: multipart/form-data' \
--header 'Accept: application/json' \
--header 'Authorization: Bearer <token>' \
--form 'file=@"/path/to/file"'
```

The following API uploads the file to S3 and adds the metadata to the database. Here is how it works:

The endpoint returns a `DocumentMetadataRead` object, which contains metadata about the uploaded document, or a `Dict[str, str]` object in case of an error or other non-standard situation. The response of `DocumentRepository.upload` has two cases, `file_added` and `file_updated`. `file_added` is the case when the uploaded file does not already exist for that user, and `file_updated` is the case when a file with the same name exists and a new version of the file is added (details about versioning are mentioned [here]()).

### Code Explanation

```python
1.  async def upload(self, metadata_repo, user_repo, file: File, folder: str, user: TokenData) -> Dict[str, Any]:
2.      """
3.      Uploads a file to the specified folder in the document repository.
4.
5.      Args:
6.          @param metadata_repo: The repository for accessing metadata.
7.          @param user_repo: The repository for accessing user information.
8.          @param file: The file to be uploaded.
9.          @param folder: The folder in which the file should be uploaded.
10.         @param user: The token data of the user.
11.
12.     Returns:
13.         @return: A dictionary containing the response and upload information.
14.
15.     Raises:
16.         HTTP_400: If the file type is not supported.
17.     """
18.
19.     file_type = file.content_type
20.     if file_type not in SUPPORTED_FILE_TYPES:
21.         raise HTTP_400(
22.             msg=f"File type {file_type} not supported."
23.         )
24.
25.     contents = file.file.read()
26.
27.     doc = (await metadata_repo.get(document=file.filename, owner=user)).__dict__
28.     # hash of the file uploaded to check if change in file
29.     new_file_hash: str = await self._calculate_file_hash(file=file)
30.     if "status_code" in doc.keys():
31.         # getting document irrespective of user
32.         if get_doc := (await metadata_repo.get_doc(filename=file.filename)):
33.             get_doc = get_doc.__dict__
34.             # Check if logged-in user has update access
35.             logged_in_user = (await user_repo.get_user(field="username", detail=user.username)).__dict__
36.             if (get_doc["access_to"] is not None) and logged_in_user["email"] in get_doc["access_to"]:
37.                 if get_doc['file_hash'] != new_file_hash:
38.                     # can upload a version to a file...
39.                     print(f"Have update access, to a file... owner: {get_doc['owner_id']}")
40.                     return await self._upload_new_version(
41.                         doc=get_doc, file=file, contents=contents, file_type=file_type,
42.                         new_file_hash=await self._calculate_file_hash(file=file),
43.                         is_owner=False
44.                     )
45.             else:
46.                 return await self._upload_new_file(
47.                     file=file, folder=folder, contents=contents, file_type=file_type, user=user
48.                 )
49.         return await self._upload_new_file(
50.             file=file, folder=folder, contents=contents, file_type=file_type, user=user
51.         )
52.
53.     print("File already present, checking if there is an update...")
54.
55.     if doc["file_hash"] != new_file_hash:
56.         print("File has been updated, uploading new version...")
57.         return await self._upload_new_version(doc=doc, file=file, contents=contents, file_type=file_type,
58.                                               new_file_hash=new_file_hash, is_owner=True)
59.
60.     return {
61.         "response": "File already present and no changes detected.",
62.         "upload": "Noting to update..."
63.     }
```

This is the biggest chunk of code we have in DocFlow, and it handles multiple conditions and cases.

#### Parameters:

```
file: File -> File to be uploaded
folder: str -> Folder to upload in...
```

From lines 19-23, we check if the file type is supported for upload.

Line 27 gets the document if it is already present in the database for the logged-in user. It returns the following response if a new file is being uploaded:

```json
{
    "status_code": 409,
    "detail": "No Document with ",
    "headers": null
}
```

And below is the response if the file is already present:

```json
{
    "owner_id": "",
    "name": "",
    "s3_url": "",
    "created_at": "2023-12-24T07:05:51.971123Z",
    "id": "",
    ...
}
```

Line 29 calculates the hash of the uploaded file, to check whether the file has changed when a file with the same name is uploaded.

So in line 30, we check if there is a `status_code` in the `doc` response. If there is, the user does not have a document with that name, which brings us to the next case: has another user given the logged-in user permission to update the file? We check that in lines 31-39, and if the user has permission, we upload a new version with line 40 (using `_upload_new_version`); otherwise we upload a new file (using `_upload_new_file`).

Now if the file is already present and we get a document response from line 27, we check whether the file has been updated. If yes, we upload a new version (using `_upload_new_version`); otherwise we return:

```json
{
    "response": "File already present and no changes detected.",
    "upload": "Noting to update..."
}
```

#### Returns:

The upload function returns one of a few things: a JSON response with `"response"` as the key, whose value is `"file updated"` if a new version of the file was uploaded, or `"file added"` if a new file was added, along with the metadata of the uploaded file.

That's too much to handle for one method, I know, but I will try to improve the quality of the upload functionality.

The following figure explains the complete flow:

![UploadDoc](../imgs/document/document_upload.png)

***

================================================
FILE: app/docs/issues.txt
================================================

# Alembic Issues

Issue101:
    Title: Alembic Migration Error: Duplicate PostgreSQL ENUM Type
    Description: While running migrations, encountered an error related to the presence of a duplicate ENUM type.
    Details:
        Error msg: sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DuplicateObject) type "<enum_name>" already exists
    Resolution: Resolved the issue by manually deleting the existing ENUM type.
    Alternative approach: would be to update the migrations file created after `alembic revision --autogenerate -m "<message>"`, creating the ENUM with `sa.Enum('<value>', '<value>', name='<enum_name>'), schema='public', checkfirst=True` so it is only created if it does not already exist.
    Prevention: ...

================================================
FILE: app/docs/setup.md
================================================

# 🚀 Setting up Docflow Locally

Just a 3-step process to get Docflow up and running on your local machine! 🌐

### 1️⃣ Clone the repository

```bash
git clone https://www.github.com/jiisanda/docflow.git
```

### 2️⃣ Configure Your Environment

Start by creating your environment file using the provided [.env.template](https://github.com/jiisanda/docflow/blob/master/.env.template). This file contains all the necessary environment variables for Docflow. Save it inside the app/ directory.
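For orientation only, a filled-in `app/.env` might end up looking roughly like the sketch below. It lists only the variables discussed in the sections that follow, with placeholder values; the actual `.env.template` remains the authoritative reference and may contain more settings.

```.ignorelang
# PostgreSQL
DATABASE_HOSTNAME=postgres
POSTGRES_USER=postgres
POSTGRES_PASSWORD=<your-password>
POSTGRES_PORT=5432
POSTGRES_DB=document_db
POSTGRES_DB_TESTS=

# AWS
AWS_ACCESS_KEY_ID=<your-access-key-id>
AWS_SECRET_ACCESS_KEY=<your-secret-access-key>

# User / JWT
ACCESS_TOKEN_EXPIRE_MIN=<default>
REFRESH_TOKEN_EXPIRE_MIN=<default>
ALGORITHM=HS256
JWT_SECRET_KEY=<generated-secret>
JWT_REFRESH_SECRET_KEY=<generated-secret>

# Email service
SMTP_SERVER=smtp.gmail.com
SMTP_PORT=587
EMAIL=<your-gmail-address>
APP_PASSWORD=<your-app-password>
```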
#### PostgreSQL Setup

Set up your PostgreSQL environment variables:

- `DATABASE_HOSTNAME`: By default, set to `postgres`.
- `POSTGRES_USER`, `POSTGRES_PASSWORD` and `POSTGRES_PORT`: Enter your PostgreSQL username, password, and port (default is `5432`).
- `POSTGRES_DB` and `POSTGRES_DB_TESTS`: Specify your database names (`POSTGRES_DB_TESTS` can be left blank).

#### AWS Setup

For AWS credentials (`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`), follow these steps:

>1. Sign in to the [AWS Management Console]() using your AWS account's root user credentials.
>2. Navigate to Security Credentials and create an access key.
>3. Copy the access key ID and secret key securely.
>4. For S3 bucket setup, refer to creating a [bucket](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-bucket.html).
>
> Source: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user_manage_add-key.html

#### User Environment

Keep `ACCESS_TOKEN_EXPIRE_MIN` and `REFRESH_TOKEN_EXPIRE_MIN` as default. Set `ALGORITHM` to an algorithm of your choice (e.g., `HS256` or `RS256`). Generate `JWT_SECRET_KEY` and `JWT_REFRESH_SECRET_KEY` using Python:

```bash
docflow$ python
>>> import secrets
>>> secrets.token_urlsafe(32)
'some-random-secret-of-length-32'
>>> secrets.token_hex(32)
'some-random-secret-of-length-32'
```

#### Email Service

This section explains how to set up the email service using Gmail. Configure the following variables:

```.ignorelang
SMTP_SERVER=smtp.gmail.com
SMTP_PORT=587
EMAIL=Your email address used to create the app
APP_PASSWORD=Generate an app password from your Google Account
```

Before starting, ensure you have enabled "Two-Factor Authentication" and "Less secure app access" for your Gmail account.

>For a deeper understanding of environment variables in Python, check out this article:
>[@dev.to/jiisanda](https://dev.to/jiisanda/how-does-python-dotenv-simplify-configuration-management-3ne6)

### 3️⃣ Run with Docker-Compose

Ensure Docker is installed, then run:

```commandline
docker-compose up --build
```

That's it! Docflow is now running on localhost:8000. If you face any issues, contact me and I will help you set up, or start an EC2 instance for testing docflow.

## ⏭️ Next Step

To test it, use Postman following the steps in [postman.md](features/postman.md).

***

================================================
FILE: app/logs/__init__.py
================================================

================================================
FILE: app/logs/logger.py
================================================

import os
import logging
import logging.config
from pathlib import Path

LOGGER_NAME: str = "docflow"
LOG_FORMAT: str = (
    "%(asctime)s [%(levelname)s] | %(name)s | %(filename)s | %(funcName)s | %(lineno)d | %(message)s"
)
LOG_LEVEL: int = logging.DEBUG


def get_log_file_path():
    """
    Get a writable log file path, trying multiple locations.
    Returns None if no writable location is found.
""" possible_locations = [ "/usr/src/app/logs/docflow.log", "/app/logs/docflow.log", "/tmp/docflow.log", "docflow.log", ] for log_path in possible_locations: try: log_file = Path(log_path) log_file.parent.mkdir(parents=True, exist_ok=True) test_write = log_file.parent / f"test_write_{os.getpid()}.tmp" test_write.touch() test_write.unlink() return str(log_file) except (OSError, PermissionError): continue return None LOG_FILE = get_log_file_path() LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "standard": { "format": LOG_FORMAT, "datefmt": "%Y-%m-%d %H:%M:%S", }, "console": { "format": "%(asctime)s [%(levelname)s] | %(name)s | %(message)s", "datefmt": "%Y-%m-%d %H:%M:%S", }, }, "handlers": { "console": { "level": "INFO", "formatter": "console", "class": "logging.StreamHandler", "stream": "ext://sys.stdout", }, "error_console": { "level": "ERROR", "formatter": "standard", "class": "logging.StreamHandler", "stream": "ext://sys.stderr", }, }, "loggers": { "": {"handlers": ["console"], "level": "INFO", "propagate": False}, LOGGER_NAME: { "handlers": ["console", "error_console"], "level": LOG_LEVEL, "propagate": False, }, "sqlalchemy": { "handlers": ["console"], "level": "WARNING", "propagate": False, }, "s3": { "handlers": ["console"], "level": "WARNING", "propagate": False, }, "uvicorn.error": {"level": "INFO", "handlers": ["console"], "propagate": False}, "uvicorn.access": { "level": "INFO", "handlers": ["console"], "propagate": False, }, "uvicorn.asgi": {"level": "INFO", "handlers": ["console"], "propagate": False}, }, } if LOG_FILE: LOGGING["handlers"]["file"] = { "class": "logging.handlers.RotatingFileHandler", "formatter": "standard", "level": "DEBUG", "filename": LOG_FILE, "mode": "a", "encoding": "utf-8", "maxBytes": 500000, "backupCount": 4, } LOGGING["loggers"][LOGGER_NAME]["handlers"].append("file") LOGGING["loggers"]["sqlalchemy"]["handlers"].append("file") LOGGING["loggers"]["s3"]["handlers"].append("file") try: logging.config.dictConfig(LOGGING) except Exception as e: logging.basicConfig( level=LOG_LEVEL, format=LOG_FORMAT, handlers=[logging.StreamHandler()] ) print(f"Warning: Failed to configure logging: {e}") docflow_logger = logging.getLogger(LOGGER_NAME) s3_logger = logging.getLogger("s3") sqlalchemy_logger = logging.getLogger("sqlalchemy") if LOG_FILE: docflow_logger.info(f"File logging enabled: {LOG_FILE}") else: docflow_logger.warning("File logging disabled - no writable location found") ================================================ FILE: app/main.py ================================================ from contextlib import asynccontextmanager from fastapi import FastAPI from fastapi.responses import FileResponse from app.api.router import router from app.core.config import settings from app.db.models import check_tables from app.logs.logger import docflow_logger from app.scripts.init_bucket import create_bucket_if_not_exists @asynccontextmanager async def lifespan(app: FastAPI): docflow_logger.info("Starting DocFlow...") try: docflow_logger.info("Initializing Tables and Storage buckets...") await check_tables() await create_bucket_if_not_exists() docflow_logger.info("Tables and Storage buckets successfully created.") except Exception as e: docflow_logger.error(f"Error during startup: {e}") raise yield app = FastAPI( title=settings.title, version=settings.version, description=settings.description, docs_url=settings.docs_url, openapi_url=settings.openapi_url, lifespan=lifespan, ) app.include_router(router=router, prefix=settings.api_prefix) 
FAVICON_PATH = "favicon.ico" @app.get(FAVICON_PATH, include_in_schema=False, tags=["Default"]) async def favicon(): return FileResponse(FAVICON_PATH) @app.get("/", tags=["Default"]) async def root(): return { "API": "DocFlow - Document Management API is running! 🚀", "version": settings.version, "docs": f"{settings.host_url}{settings.docs_url}", "storage": "MinIO" if settings.s3_endpoint_url else "AWS S3", } @app.get("/health", tags=["Default"]) async def health_check(): """Health check endpoint""" return {"status": "healthy", "service": "DocFlow API", "version": settings.version} ================================================ FILE: app/schemas/__init__.py ================================================ ================================================ FILE: app/schemas/auth/__init__.py ================================================ ================================================ FILE: app/schemas/auth/auth.py ================================================ from app.schemas.auth.bands import UserOut class SystemUser(UserOut): password: str ================================================ FILE: app/schemas/auth/bands.py ================================================ from datetime import datetime from typing import Annotated, Optional from ulid import ULID from pydantic import BaseModel, EmailStr, Field PydanticULID = Annotated[str, ULID] class UserAuth(BaseModel): username: str = Field(...) email: EmailStr = Field(..., description="Email ID") password: str = Field(..., min_length=5, max_length=14, description="Password") class UserOut(BaseModel): id: PydanticULID email: EmailStr user_since: datetime class Config: from_attributes = True class Token(BaseModel): access_token: str token_type: str class TokenData(BaseModel): id: Optional[str] = None username: Optional[str] = None ================================================ FILE: app/schemas/documents/__init__.py ================================================ ================================================ FILE: app/schemas/documents/bands.py ================================================ from datetime import datetime from typing import Optional, List from uuid import UUID from pydantic import BaseModel from app.db.tables.base_class import StatusEnum, NotifyEnum # Document Metadata class DocumentMetadataBase(BaseModel): _id: UUID owner_id: str name: str s3_url: str created_at: datetime size: Optional[int] file_type: Optional[str] tags: Optional[List[str]] categories: Optional[List[str]] status: StatusEnum file_hash: Optional[str] access_to: Optional[List[str]] class DocumentMetadataPatch(BaseModel): name: str = None tags: Optional[List[str]] = None categories: Optional[List[str]] = None access_to: Optional[List[str]] = None # Document Sharing class DocumentSharingBase(BaseModel): url_id: str owner_id: str filename: str url: str expires_at: datetime visits: int share_to: Optional[List[str]] = None class DocUserAccess(BaseModel): id: str doc_id: UUID user_id: str class Config: from_attribute = True class DocUserAccessCreate(BaseModel): doc_id: str user_id: str # Notifications class Notification(BaseModel): id: UUID receiver_id: str message: str status: NotifyEnum notified_at: datetime class NotifyPatchStatus(BaseModel): status: NotifyEnum = NotifyEnum.unread mark_all: bool = False ================================================ FILE: app/schemas/documents/document_sharing.py ================================================ from typing import List, Optional from pydantic import BaseModel from app.schemas.documents.bands import 
DocumentSharingBase class DocumentSharingCreate(DocumentSharingBase): ... class DocumentSharingRead(DocumentSharingBase): url_id: str visits: int class Config: from_attributes = True class SharingRequest(BaseModel): visits: int = 1 # default value of visits (1) share_to: Optional[List[str]] = None # emails, or usernames of users to share. message: Optional[str] = None # message from sender if any ================================================ FILE: app/schemas/documents/documents_metadata.py ================================================ from typing import Optional, List from uuid import UUID from app.schemas.documents.bands import DocumentMetadataBase class DocumentMetadataCreate(DocumentMetadataBase): owner_id: Optional[str] = None name: str s3_url: str access_to: Optional[List[str]] = None class DocumentMetadataRead(DocumentMetadataBase): id: UUID name: str class Config: from_attributes = True ================================================ FILE: app/scripts/create_database.sql ================================================ -- Creating the DATABASE CREATE DATABASE document_db; CREATE DATABASE test_document_db; GRANT ALL PRIVILEGES ON DATABASE document_db to "postgres"; GRANT ALL PRIVILEGES ON DATABASE test_document_db to "postgres"; ================================================ FILE: app/scripts/init_bucket.py ================================================ import asyncio import boto3 from botocore.exceptions import ClientError from app.core.config import settings from app.logs.logger import s3_logger async def create_bucket_if_not_exists(): """Create S3/MinIO bucket if it doesn't exist""" try: boto3_config = { "aws_access_key_id": settings.aws_access_key_id, "aws_secret_access_key": settings.aws_secret_key, "region_name": settings.aws_region, } if settings.s3_endpoint_url: boto3_config["endpoint_url"] = settings.s3_endpoint_url client = boto3.client("s3", **boto3_config) try: client.head_bucket(Bucket=settings.s3_bucket) s3_logger.info(f"✅ Bucket '{settings.s3_bucket}' already exists") except ClientError as e: error_code = e.response["Error"]["Code"] if error_code == "404": try: if settings.s3_endpoint_url: client.create_bucket(Bucket=settings.s3_bucket) else: if settings.aws_region == "us-east-1": client.create_bucket(Bucket=settings.s3_bucket) else: client.create_bucket( Bucket=settings.s3_bucket, CreateBucketConfiguration={ "LocationConstraint": settings.aws_region }, ) s3_logger.info(f"✅ Created bucket '{settings.s3_bucket}'") await asyncio.sleep(1) # waiting for bucket to get created try: client.put_bucket_versioning( Bucket=settings.s3_bucket, VersioningConfiguration={"Status": "Enabled"}, ) s3_logger.info( f"✅ Enabled versioning for bucket '{settings.s3_bucket}'" ) except Exception as ve: s3_logger.warning(f"⚠️ Could not enable versioning: {ve}") except ClientError as ce: s3_logger.warning( f"❌ Failed to create bucket '{settings.s3_bucket}': {ce}" ) raise else: s3_logger.warning( f"❌ Error accessing bucket '{settings.s3_bucket}': {e}" ) raise # Also create test bucket if specified and different from main bucket if settings.s3_test_bucket and settings.s3_test_bucket != settings.s3_bucket: try: client.head_bucket(Bucket=settings.s3_test_bucket) s3_logger.info( f"✅ Test bucket '{settings.s3_test_bucket}' already exists" ) except ClientError as e: if e.response["Error"]["Code"] == "404": try: if settings.s3_endpoint_url: client.create_bucket(Bucket=settings.s3_test_bucket) else: if settings.aws_region == "us-east-1": client.create_bucket(Bucket=settings.s3_test_bucket) 
else: client.create_bucket( Bucket=settings.s3_test_bucket, CreateBucketConfiguration={ "LocationConstraint": settings.aws_region }, ) s3_logger.info( f"✅ Created test bucket '{settings.s3_test_bucket}'" ) except ClientError as ce: s3_logger.warning( f"❌ Failed to create test bucket '{settings.s3_test_bucket}': {ce}" ) except Exception as e: s3_logger.warning(f"❌ Error during bucket initialization: {e}") ================================================ FILE: docker-compose.override.yml ================================================ services: api: build: context: . dockerfile: api.Dockerfile # Bind mounts volumes: - ./:/usr/src/app:ro - ./downloads:/app/downloads - ./logs:/usr/src/app/logs command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload minio: image: minio/minio:RELEASE.2025-05-24T17-08-30Z-cpuv1 command: server /data --console-address ":9001" ports: - "9000:9000" - "9001:9001" env_file: - app/.env healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] interval: 30s timeout: 20s retries: 3 volumes: - minio-data:/data volumes: minio-data: ================================================ FILE: docker-compose.prod.yml ================================================ services: api: image: jiisanda/docflow:1 command: uvicorn app.main:app --host 0.0.0.0 --port 8000 nginx: image: nginx:1.25.3-alpine ports: - "80:80" volumes: - ./nginx:/etc/nginx/conf.d ================================================ FILE: docker-compose.yml ================================================ services: api: depends_on: - postgres ports: - "8000:8000" env_file: app/.env postgres: image: postgres env_file: app/.env volumes: - postgres-db:/var/lib/postgresql ports: - "5432:5432" volumes: postgres-db: ================================================ FILE: hello.txt ================================================ ================================================ FILE: migrations/__init__.py ================================================ ================================================ FILE: migrations/env.py ================================================ from logging.config import fileConfig from sqlalchemy import engine_from_config from sqlalchemy import pool from alembic import context from app.db.models import Base from app.core.config import settings from app.db.tables.documents.documents_metadata import DocumentMetadata from app.db.tables.auth.auth import User from app.db.tables.documents.document_sharing import DocumentSharing from app.db.tables.documents.notify import Notify # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config config.set_main_option("sqlalchemy.url", settings.sync_database_url) # Interpret the config file for Python logging. # This line sets up loggers basically. if config.config_file_name is not None: fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support target_metadata = Base.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline() -> None: """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. 
""" url = config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, ) with context.begin_transaction(): context.run_migrations() def run_migrations_online() -> None: """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ connectable = engine_from_config( config.get_section(config.config_ini_section, {}), prefix="sqlalchemy.", poolclass=pool.NullPool, ) with connectable.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() ================================================ FILE: migrations/script.py.mako ================================================ """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ from typing import Sequence, Union from alembic import op import sqlalchemy as sa ${imports if imports else ""} # revision identifiers, used by Alembic. revision: str = ${repr(up_revision)} down_revision: Union[str, None] = ${repr(down_revision)} branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: ${upgrades if upgrades else "pass"} def downgrade() -> None: ${downgrades if downgrades else "pass"} ================================================ FILE: migrations/versions/2a02384ab925_initial_almebic.py ================================================ """Initial Almebic Revision ID: 2a02384ab925 Revises: Create Date: 2023-11-01 20:51:23.621851 """ from typing import Sequence, Union from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision: str = "2a02384ab925" down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### op.create_table( "notify", sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), sa.Column("receiver_id", sa.String(), nullable=False), sa.Column("message", sa.Text(), nullable=False), sa.Column( "status", sa.Enum("read", "unread", name="notifyenum"), nullable=True ), sa.Column( "notified_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()"), nullable=False, ), sa.PrimaryKeyConstraint("id"), ) op.create_index(op.f("ix_notify_id"), "notify", ["id"], unique=False) op.create_table( "users", sa.Column("id", sa.String(length=26), nullable=False), sa.Column("username", sa.String(), nullable=False), sa.Column("email", sa.String(), nullable=False), sa.Column("password", sa.Text(), nullable=False), sa.Column( "user_since", sa.TIMESTAMP(timezone=True), server_default=sa.text("now()"), nullable=False, ), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("email"), sa.UniqueConstraint("username"), ) op.create_index(op.f("ix_users_id"), "users", ["id"], unique=True) op.create_table( "document_metadata", sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), sa.Column("owner_id", sa.String(), nullable=False), sa.Column("name", sa.String(), nullable=True), sa.Column("s3_url", sa.String(), nullable=True), sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()"), nullable=False, ), sa.Column("size", sa.Integer(), nullable=True), sa.Column("file_type", sa.String(), nullable=True), sa.Column("tags", sa.ARRAY(sa.String()), nullable=True), sa.Column("categories", sa.ARRAY(sa.String()), nullable=True), sa.Column( "status", sa.Enum( "public", "private", "shared", "deleted", "archived", name="statusenum" ), nullable=True, ), sa.Column("file_hash", sa.String(), nullable=True), sa.Column("access_to", sa.ARRAY(sa.String()), nullable=True), sa.ForeignKeyConstraint( ["owner_id"], ["users.id"], ), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("s3_url"), ) op.create_index( op.f("ix_document_metadata_id"), "document_metadata", ["id"], unique=False ) op.create_table( "share_url", sa.Column("url_id", sa.String(), nullable=False), sa.Column("filename", sa.String(), nullable=False), sa.Column("owner_id", sa.String(), nullable=False), sa.Column("url", sa.String(), nullable=True), sa.Column("expires_at", sa.DateTime(timezone=True), nullable=True), sa.Column("visits", sa.Integer(), nullable=True), sa.Column("share_to", sa.ARRAY(sa.String()), nullable=True), sa.ForeignKeyConstraint( ["owner_id"], ["users.id"], ), sa.PrimaryKeyConstraint("url_id"), sa.UniqueConstraint("filename"), sa.UniqueConstraint("url"), sa.UniqueConstraint("url_id"), ) op.create_table( "doc_user_access", sa.Column("doc_id", postgresql.UUID(as_uuid=True), nullable=True), sa.Column("user_id", sa.String(length=26), nullable=True), sa.ForeignKeyConstraint( ["doc_id"], ["document_metadata.id"], ondelete="CASCADE" ), sa.ForeignKeyConstraint( ["user_id"], ["users.id"], ), sa.UniqueConstraint("doc_id", "user_id", name="uq_doc_user_access_doc_user"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### op.drop_table("doc_user_access") op.drop_table("share_url") op.drop_index(op.f("ix_document_metadata_id"), table_name="document_metadata") op.drop_table("document_metadata") op.drop_index(op.f("ix_users_id"), table_name="users") op.drop_table("users") op.drop_index(op.f("ix_notify_id"), table_name="notify") op.drop_table("notify") # ### end Alembic commands ### ================================================ FILE: migrations/versions/__init__.py ================================================ ================================================ FILE: nginx/nginx.conf ================================================ server { listen 80; server_name _; # As we are using IP address or the public DNS of our EC2 instance location / { proxy_pass http://34.219.169.191:8000; } } ================================================ FILE: requirements/api.txt ================================================ alembic>=1.12.1 annotated-types>=0.6.0 anyio>=4.2.0 asyncpg==0.30.0 boto3>=1.34.34 botocore>=1.34.34 certifi>=2024.7.4 click>=8.1.7 colorama>=0.4.6 dnspython>=2.6.1 email-validator>=2.1.0 fastapi>=0.109.2 greenlet==3.1.1 h11>=0.14.0 httpcore>=0.17.3 httpx>=0.24.1 idna>=3.7 iniconfig>=2.0.0 jmespath>=1.0.1 Mako>=1.3.0 MarkupSafe>=2.1.3 packaging>=23.2 passlib~=1.7.4 pluggy>=1.3.0 psycopg2-binary==2.9.10 pyasn1>=0.5.1 pydantic>=2.8.2 pydantic-settings>=2.4.0 pydantic_core==2.23.4 pytest>=7.4.4 python-dateutil>=2.8.2 python-dotenv>=1.0.0 python-jose==3.3.0 python-multipart>=0.0.18 python-ulid>=2.2.0 rsa>=4.9 s3transfer>=0.10.0 six>=1.16.0 sniffio>=1.3.0 SQLAlchemy>=1.4.51 starlette>=0.40.0 typing_extensions>=4.9.0 urllib3>=2.2.2 uvicorn>=0.26.0