Initial commit on BE
This commit is contained in:
commit
04e1199ee9
14
.env.example
Normal file
14
.env.example
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
DEBUG=1
|
||||||
|
HOST=127.0.0.1
|
||||||
|
PORT=5000
|
||||||
|
DATABASE_URL=postgresql+asyncpg://satupeta_user:change-me@localhost:5432/satupeta
|
||||||
|
SECRET_KEY=change-me-generate-a-random-secret-e.g.-openssl-rand-hex-32
|
||||||
|
ALGORITHM=HS256
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES=1440
|
||||||
|
REFRESH_TOKEN_EXPIRE_DAYS=7
|
||||||
|
|
||||||
|
MINIO_ENDPOINT_URL=localhost:9000
|
||||||
|
MINIO_ROOT_USER=minioadmin
|
||||||
|
MINIO_ROOT_PASSWORD=minioadmin123
|
||||||
|
|
||||||
|
GEONETWORK_API_URL=https://geonetwork.jatimprov.go.id/geonetwork/srv/api/search/records/_search
|
||||||
174
.gitignore
vendored
Normal file
174
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,174 @@
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# UV
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
#uv.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||||
|
.pdm.toml
|
||||||
|
.pdm-python
|
||||||
|
.pdm-build/
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
# Ruff stuff:
|
||||||
|
.ruff_cache/
|
||||||
|
|
||||||
|
# PyPI configuration file
|
||||||
|
.pypirc
|
||||||
82
.pre-commit-config.yaml
Normal file
82
.pre-commit-config.yaml
Normal file
|
|
@ -0,0 +1,82 @@
|
||||||
|
repos:
|
||||||
|
# Formatting tools
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 24.1.1
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
args:
|
||||||
|
- "--line-length=119"
|
||||||
|
- "--include=\\.pyi?$"
|
||||||
|
|
||||||
|
- repo: https://github.com/pycqa/isort
|
||||||
|
rev: 5.13.2
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
args:
|
||||||
|
- --profile=black
|
||||||
|
|
||||||
|
- repo: https://github.com/myint/autoflake
|
||||||
|
rev: v2.2.1
|
||||||
|
hooks:
|
||||||
|
- id: autoflake
|
||||||
|
args: [ --in-place, --remove-unused-variables, --remove-all-unused-imports ]
|
||||||
|
files: \.py$
|
||||||
|
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v3.15.0
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [ --py310-plus ]
|
||||||
|
|
||||||
|
# Code quality and linting
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v4.5.0
|
||||||
|
hooks:
|
||||||
|
- id: trailing-whitespace
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
- id: fix-encoding-pragma
|
||||||
|
args: [ --remove ]
|
||||||
|
- id: check-yaml
|
||||||
|
- id: debug-statements
|
||||||
|
|
||||||
|
- repo: https://github.com/pre-commit/pygrep-hooks
|
||||||
|
rev: v1.10.0
|
||||||
|
hooks:
|
||||||
|
- id: python-use-type-annotations
|
||||||
|
|
||||||
|
|
||||||
|
# Dependency management
|
||||||
|
- repo: https://github.com/peterdemin/pip-compile-multi
|
||||||
|
rev: v2.6.2
|
||||||
|
hooks:
|
||||||
|
- id: pip-compile-multi-verify
|
||||||
|
|
||||||
|
- repo: https://github.com/asottile/setup-cfg-fmt
|
||||||
|
rev: v2.5.0
|
||||||
|
hooks:
|
||||||
|
- id: setup-cfg-fmt
|
||||||
|
args: [ "--max-py-version=3.12" ]
|
||||||
|
|
||||||
|
# Documentation and commit checks
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: rst-lint
|
||||||
|
name: rst
|
||||||
|
entry: rst-lint --encoding utf-8
|
||||||
|
files: ^(RELEASING.rst|README.rst|TIDELIFT.rst)$
|
||||||
|
language: python
|
||||||
|
|
||||||
|
- repo: https://github.com/commitizen-tools/commitizen
|
||||||
|
rev: v3.13.0
|
||||||
|
hooks:
|
||||||
|
- id: commitizen
|
||||||
|
stages: [ commit-msg ]
|
||||||
|
|
||||||
|
# Custom checks
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: pytest-staged
|
||||||
|
name: test on Staged
|
||||||
|
entry: sh -c 'pytest $(git diff --name-only --cached | grep -E "\\.py$") || exit 0'
|
||||||
|
stages: [ pre-commit ]
|
||||||
|
language: python
|
||||||
1
.python-version
Normal file
1
.python-version
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
3.13.0
|
||||||
67
Dockerfile
Normal file
67
Dockerfile
Normal file
|
|
@ -0,0 +1,67 @@
|
||||||
|
FROM python:3.13-slim-bookworm AS base
|
||||||
|
|
||||||
|
ENV POETRY_HOME="/opt/poetry" \
|
||||||
|
PYTHONPATH=/app \
|
||||||
|
PYTHONHASHSEED=0 \
|
||||||
|
POETRY_VERSION=1.7.1 \
|
||||||
|
POETRY_NO_INTERACTION=1 \
|
||||||
|
POETRY_VIRTUALENVS_CREATE=false \
|
||||||
|
POETRY_VIRTUALENVS_IN_PROJECT=false \
|
||||||
|
    PYTHONDONTWRITEBYTECODE=1 \
|
||||||
|
PYTHONUNBUFFERED=1 \
|
||||||
|
PATH="/opt/poetry/bin:$PATH"
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/var/cache/apt \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
libpq-dev \
|
||||||
|
locales \
|
||||||
|
locales-all \
|
||||||
|
libmagic1 \
|
||||||
|
libjemalloc2 \
|
||||||
|
procps && \
|
||||||
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
echo "id_ID.UTF-8 UTF-8" > /etc/locale.gen && \
|
||||||
|
locale-gen
|
||||||
|
|
||||||
|
ENV LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
|
||||||
|
ENV MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:30000,muzzy_decay_ms:30000"
|
||||||
|
|
||||||
|
FROM base AS builder
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/var/cache/apt \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
curl \
|
||||||
|
git \
|
||||||
|
build-essential && \
|
||||||
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python3 -
|
||||||
|
|
||||||
|
COPY pyproject.toml poetry.lock ./
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pypoetry \
|
||||||
|
poetry install --no-root --no-interaction --no-ansi
|
||||||
|
|
||||||
|
RUN apt-get autoremove -y && \
|
||||||
|
apt-get purge -y curl git build-essential && \
|
||||||
|
apt-get clean -y && \
|
||||||
|
rm -rf /root/.cache /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
FROM base AS app-image
|
||||||
|
|
||||||
|
COPY --from=builder /opt/poetry /opt/poetry
|
||||||
|
COPY --from=builder /usr/local/lib/python3.13/site-packages/ /usr/local/lib/python3.13/site-packages/
|
||||||
|
COPY --from=builder /usr/local/bin/ /usr/local/bin/
|
||||||
|
|
||||||
|
COPY . /app
|
||||||
|
|
||||||
|
ENV PYTHONFAULTHANDLER=1 \
|
||||||
|
PYTHONHASHSEED=random \
|
||||||
|
PYTHONOPTIMIZE=2
|
||||||
|
|
||||||
|
EXPOSE 5000
|
||||||
|
|
||||||
|
CMD ["python", "-OO", "run.py"]
|
||||||
393
README.md
Normal file
393
README.md
Normal file
|
|
@ -0,0 +1,393 @@
|
||||||
|
# Portal Satu Peta Backend
|
||||||
|
|
||||||
|
Backend for the Portal Satu Peta application.
|
||||||
|
|
||||||
|
## Folder Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
├── .env.example # Example environment file
|
||||||
|
├── .github/ # GitHub Actions configuration
|
||||||
|
│ └── workflows/
|
||||||
|
│ └── deploy.yml # Workflow for deployment
|
||||||
|
├── .gitignore # Files and folders ignored by Git
|
||||||
|
├── .pre-commit-config.yaml # Pre-commit hooks configuration
|
||||||
|
├── Dockerfile # Instructions for building the Docker image
|
||||||
|
├── README.md # This file
|
||||||
|
├── alembic.ini # Alembic configuration for database migrations
|
||||||
|
├── app/ # Main application directory
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── api/ # API module (endpoints)
|
||||||
|
│ │ ├── dependencies/ # Dependencies for API (e.g., authentication)
|
||||||
|
│ │ └── v1/ # API version 1
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ └── routes/ # Route/endpoint definitions
|
||||||
|
│ ├── core/ # Core application configuration
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── config.py # Application settings (from environment variables)
|
||||||
|
│ │ ├── data_types.py # Custom data types
|
||||||
|
│ │ ├── database.py # Database configuration
|
||||||
|
│ │ ├── exceptions.py # Custom exceptions
|
||||||
|
│ │ ├── minio_client.py # Client for MinIO (object storage)
|
||||||
|
│ │ ├── params.py # Common parameters for requests
|
||||||
|
│ │ ├── responses.py # Standard response schemas
|
||||||
|
│ │ └── security.py # Security-related functions (password hashing, tokens)
|
||||||
|
│ ├── main.py # FastAPI application entry point
|
||||||
|
│ ├── models/ # SQLAlchemy model definitions (database tables)
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── base.py # Base model for SQLAlchemy
|
||||||
|
│ │ └── ... (other models)
|
||||||
|
│ ├── repositories/ # Data access logic (interaction with the database)
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── base.py # Base repository
|
||||||
|
│ │ └── ... (other repositories)
|
||||||
|
│ ├── schemas/ # Pydantic schemas (request/response data validation)
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── base.py # Base schema
|
||||||
|
│ │ └── ... (other schemas)
|
||||||
|
│ ├── services/ # Application business logic
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── base.py # Base service
|
||||||
|
│ │ └── ... (other services)
|
||||||
|
│ └── utils/ # General utilities
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── encryption.py # Encryption functions
|
||||||
|
│ ├── helpers.py # Helper functions
|
||||||
|
│ └── system.py # System-related utilities
|
||||||
|
├── assets/ # Static asset files (if any)
|
||||||
|
├── docker-compose.yml # Docker Compose configuration
|
||||||
|
├── migrations/ # Alembic database migration scripts
|
||||||
|
│ ├── README
|
||||||
|
│ ├── env.py
|
||||||
|
│ ├── script.py.mako
|
||||||
|
│ ├── scripts.py
|
||||||
|
│ └── versions/ # Migration version files
|
||||||
|
│ └── __init__.py
|
||||||
|
├── poetry.lock # Poetry dependency lock file
|
||||||
|
├── pyproject.toml # Poetry project configuration file
|
||||||
|
├── run.py # Script to run Uvicorn server locally
|
||||||
|
└── tests/ # Directory for unit and integration tests
|
||||||
|
├── __init__.py
|
||||||
|
├── conftest.py # Pytest configuration
|
||||||
|
├── test_api/
|
||||||
|
│ └── __init__.py
|
||||||
|
└── test_services/
|
||||||
|
└── __init__.py
|
||||||
|
```
|
||||||
|
|
||||||
|
## How to Run the Project
|
||||||
|
|
||||||
|
### 1. Initial Setup
|
||||||
|
|
||||||
|
* Ensure you have Python (version >=3.10, <4.0 recommended as per `pyproject.toml`) and Poetry installed.
|
||||||
|
* Copy the `.env.example` file to `.env` and customize its configuration, especially for database and MinIO connections.
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
* Edit the `.env` file as needed.
|
||||||
|
|
||||||
|
### 2. Running Locally (using Poetry and Uvicorn)
|
||||||
|
|
||||||
|
1. **Install dependencies:**
|
||||||
|
```bash
|
||||||
|
poetry install
|
||||||
|
```
|
||||||
|
2. **Run database migrations (if necessary):**
|
||||||
|
Ensure the database is running and the configuration in `.env` is correct.
|
||||||
|
```bash
|
||||||
|
poetry run alembic upgrade head
|
||||||
|
```
|
||||||
|
Alternatively, if there's a custom script for migrations as seen in `deploy.yml`:
|
||||||
|
```bash
|
||||||
|
poetry run python migrations/scripts.py
|
||||||
|
```
|
||||||
|
*(Check the content of `migrations/scripts.py` for the exact command if it differs)*
|
||||||
|
|
||||||
|
3. **Run the application server:**
|
||||||
|
```bash
|
||||||
|
poetry run python run.py
|
||||||
|
```
|
||||||
|
Or directly using Uvicorn:
|
||||||
|
```bash
|
||||||
|
poetry run uvicorn app.main:app --host 0.0.0.0 --port 5000 --reload
|
||||||
|
```
|
||||||
|
The application will run at `http://localhost:5000` (or as configured in `.env` and `run.py`).
|
||||||
|
|
||||||
|
### 3. Running Using Docker
|
||||||
|
|
||||||
|
1. **Ensure Docker and Docker Compose are installed.**
|
||||||
|
2. **Build and run the container:**
|
||||||
|
From the project root directory, run:
|
||||||
|
```bash
|
||||||
|
docker-compose up --build
|
||||||
|
```
|
||||||
|
If you have an `environment.env` file (as referenced in `docker-compose.yml`), ensure it exists and contains the necessary environment configurations. Otherwise, you might need to adjust `docker-compose.yml` to use the `.env` file or set environment variables directly.
|
||||||
|
|
||||||
|
The application will run at `http://localhost:5000` (as per port mapping in `docker-compose.yml`).
|
||||||
|
|
||||||
|
## How to Create an Endpoint, Model, Repository, and Service
|
||||||
|
|
||||||
|
This project follows a layered architecture pattern commonly used in FastAPI applications.
|
||||||
|
|
||||||
|
### 1. Creating a Model (`app/models/`)
|
||||||
|
|
||||||
|
Models represent tables in your database. They are defined using SQLAlchemy.
|
||||||
|
|
||||||
|
Example (e.g., `app/models/item_model.py`):
|
||||||
|
```python
|
||||||
|
from sqlalchemy import Column, Integer, String, ForeignKey, UUID
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
import uuid6
|
||||||
|
from . import Base # Ensure Base is imported from app.models
|
||||||
|
|
||||||
|
class ItemModel(Base):
|
||||||
|
__tablename__ = "items"
|
||||||
|
|
||||||
|
id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
|
||||||
|
name = Column(String, index=True)
|
||||||
|
description = Column(String, index=True)
|
||||||
|
owner_id = Column(UUID(as_uuid=True), ForeignKey("users.id")) # Example relationship
|
||||||
|
|
||||||
|
owner = relationship("UserModel", back_populates="items") # Adjust to your User model
|
||||||
|
```
|
||||||
|
* Don't forget to add the new model to `app/models/__init__.py` if necessary and create a database migration using Alembic.
|
||||||
|
```bash
|
||||||
|
poetry run alembic revision -m "create_items_table"
|
||||||
|
```
|
||||||
|
Then edit the newly created migration file in `migrations/versions/` to define the `upgrade()` and `downgrade()` functions, and run:
|
||||||
|
```bash
|
||||||
|
poetry run alembic upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Creating a Schema (`app/schemas/`)
|
||||||
|
|
||||||
|
Pydantic schemas are used for request data validation and response data formatting.
|
||||||
|
|
||||||
|
Example (e.g., `app/schemas/item_schema.py`):
|
||||||
|
```python
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from app.core.data_types import UUID7Field # Or the appropriate UUID type
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
class ItemBase(BaseModel):
|
||||||
|
name: str
|
||||||
|
description: Optional[str] = None
|
||||||
|
|
||||||
|
class ItemCreateSchema(ItemBase):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class ItemUpdateSchema(BaseModel):
|
||||||
|
name: Optional[str] = None
|
||||||
|
description: Optional[str] = None
|
||||||
|
|
||||||
|
class ItemSchema(ItemBase):
|
||||||
|
id: UUID7Field
|
||||||
|
owner_id: UUID7Field
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
orm_mode = True # or from_attributes = True for Pydantic v2
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Creating a Repository (`app/repositories/`)
|
||||||
|
|
||||||
|
Repositories are responsible for all database interactions related to a model.
|
||||||
|
|
||||||
|
Example (e.g., `app/repositories/item_repository.py`):
|
||||||
|
```python
|
||||||
|
from sqlalchemy import select
|
||||||
|
from fastapi_async_sqlalchemy import db # or the appropriate db session
|
||||||
|
from app.models.item_model import ItemModel # Import your model
|
||||||
|
from .base import BaseRepository # Import BaseRepository
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
class ItemRepository(BaseRepository[ItemModel]):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__(ItemModel)
|
||||||
|
|
||||||
|
async def find_by_name(self, name: str) -> ItemModel | None:
|
||||||
|
query = select(self.model).filter(self.model.name == name)
|
||||||
|
result = await db.session.execute(query)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
# Add other methods as needed (findById, create, update, delete, etc.)
|
||||||
|
# Example find_by_id from UserRepository:
|
||||||
|
async def find_by_id(self, id: UUID7Field) -> ItemModel | None:
|
||||||
|
query = select(self.model).filter(self.model.id == id)
|
||||||
|
result = await db.session.execute(query)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
```
|
||||||
|
* Ensure to register the new repository in `app/api/dependencies/factory.py` if you are using the factory pattern for dependencies.
|
||||||
|
|
||||||
|
### 4. Creating a Service (`app/services/`)
|
||||||
|
|
||||||
|
Services contain the application's business logic. Services will use repositories to interact with data.
|
||||||
|
|
||||||
|
Example (e.g., `app/services/item_service.py`):
|
||||||
|
```python
|
||||||
|
from typing import Dict, List, Tuple, Union
|
||||||
|
from uuid6 import UUID # or from app.core.data_types import UUID7Field
|
||||||
|
from fastapi import HTTPException, status
|
||||||
|
|
||||||
|
from app.models.item_model import ItemModel
|
||||||
|
from app.repositories.item_repository import ItemRepository
|
||||||
|
from app.schemas.item_schema import ItemCreateSchema, ItemUpdateSchema, ItemSchema # Import your schemas
|
||||||
|
from app.schemas.user_schema import UserSchema # For user info performing the action
|
||||||
|
from .base import BaseService
|
||||||
|
from app.core.exceptions import NotFoundException
|
||||||
|
|
||||||
|
class ItemService(BaseService[ItemModel, ItemRepository]):
|
||||||
|
def __init__(self, repository: ItemRepository):
|
||||||
|
super().__init__(ItemModel, repository)
|
||||||
|
# self.user_service = user_service # If other services are needed
|
||||||
|
|
||||||
|
async def create_item(self, item_data: ItemCreateSchema, current_user: UserSchema) -> ItemModel:
|
||||||
|
# Business logic before creating the item
|
||||||
|
# For example, check if an item with the same name already exists
|
||||||
|
existing_item = await self.repository.find_by_name(item_data.name)
|
||||||
|
if existing_item:
|
||||||
|
raise HTTPException(status_code=400, detail="Item with this name already exists")
|
||||||
|
|
||||||
|
item_dict = item_data.model_dump()
|
||||||
|
item_dict['owner_id'] = current_user.id # Example of setting the owner
|
||||||
|
return await self.repository.create(item_dict)
|
||||||
|
|
||||||
|
async def get_item_by_id(self, item_id: UUID, current_user: UserSchema) -> ItemModel:
|
||||||
|
item = await self.repository.find_by_id(item_id)
|
||||||
|
if not item:
|
||||||
|
raise NotFoundException(f"Item with id {item_id} not found")
|
||||||
|
# Business logic for authorization, for example:
|
||||||
|
# if item.owner_id != current_user.id and not current_user.is_admin:
|
||||||
|
# raise HTTPException(status_code=403, detail="Not authorized to access this item")
|
||||||
|
return item
|
||||||
|
|
||||||
|
# Add other methods (update, delete, get_all, etc.)
|
||||||
|
```
|
||||||
|
* Ensure to register the new service in `app/api/dependencies/factory.py`.
|
||||||
|
|
||||||
|
### 5. Creating an Endpoint (`app/api/v1/routes/`)
|
||||||
|
|
||||||
|
Endpoints are the HTTP entry points to your application. They are defined using FastAPI APIRouter.
|
||||||
|
|
||||||
|
Example (e.g., `app/api/v1/routes/item_route.py`):
|
||||||
|
```python
|
||||||
|
from typing import List
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user # Authentication dependency
|
||||||
|
from app.api.dependencies.factory import Factory # Factory for service dependencies
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.schemas.item_schema import ItemCreateSchema, ItemSchema, ItemUpdateSchema # Your schemas
|
||||||
|
from app.schemas.user_schema import UserSchema # User schema for auth dependency
|
||||||
|
from app.services.item_service import ItemService # Your service
|
||||||
|
from app.schemas.base import PaginatedResponse # If using pagination
|
||||||
|
from app.core.params import CommonParams # If using common parameters
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.post("/items", response_model=ItemSchema, status_code=status.HTTP_201_CREATED)
|
||||||
|
async def create_item(
|
||||||
|
item_in: ItemCreateSchema,
|
||||||
|
current_user: UserSchema = Depends(get_current_active_user),
|
||||||
|
service: ItemService = Depends(Factory().get_item_service), # Get service from factory
|
||||||
|
):
|
||||||
|
item = await service.create_item(item_data=item_in, current_user=current_user)
|
||||||
|
return item
|
||||||
|
|
||||||
|
@router.get("/items/{item_id}", response_model=ItemSchema)
|
||||||
|
async def read_item(
|
||||||
|
item_id: UUID7Field,
|
||||||
|
current_user: UserSchema = Depends(get_current_active_user),
|
||||||
|
service: ItemService = Depends(Factory().get_item_service),
|
||||||
|
):
|
||||||
|
item = await service.get_item_by_id(item_id=item_id, current_user=current_user)
|
||||||
|
return item
|
||||||
|
|
||||||
|
# Add other endpoints (GET all, PUT/PATCH, DELETE)
|
||||||
|
# Example GET all with pagination:
|
||||||
|
@router.get("/items", response_model=PaginatedResponse[ItemSchema])
|
||||||
|
async def get_items(
|
||||||
|
params: CommonParams = Depends(),
|
||||||
|
user_active: UserSchema = Depends(get_current_active_user),
|
||||||
|
service: ItemService = Depends(Factory().get_item_service),
|
||||||
|
):
|
||||||
|
# Assume your service has a find_all method similar to UserService
|
||||||
|
items, total = await service.find_all(
|
||||||
|
filters=params.filter,
|
||||||
|
sort=params.sort,
|
||||||
|
search=params.search,
|
||||||
|
limit=params.limit,
|
||||||
|
offset=params.offset,
|
||||||
|
user=user_active, # For access control if needed
|
||||||
|
)
|
||||||
|
|
||||||
|
return PaginatedResponse(
|
||||||
|
items=[ItemSchema.model_validate(item) for item in items],
|
||||||
|
total=total,
|
||||||
|
limit=params.limit,
|
||||||
|
offset=params.offset,
|
||||||
|
has_more=total > (params.offset + params.limit),
|
||||||
|
)
|
||||||
|
|
||||||
|
```
|
||||||
|
* Register the new router in `app/api/v1/__init__.py` or `app/main.py`.
|
||||||
|
Example in `app/main.py`:
|
||||||
|
```python
|
||||||
|
# ... existing code ...
|
||||||
|
from app.api.v1.routes import item_route # Import your new router
|
||||||
|
# ... existing code ...
|
||||||
|
|
||||||
|
app.include_router(item_route.router, prefix="/api/v1", tags=["Items"])
|
||||||
|
# ... existing code ...
|
||||||
|
```
|
||||||
|
|
||||||
|
## How to Deploy
|
||||||
|
|
||||||
|
This project is configured to be deployed using GitHub Actions when there is a push to the `main` branch or via manual trigger.
|
||||||
|
|
||||||
|
The deployment process defined in `.github/workflows/deploy.yml` is as follows:
|
||||||
|
|
||||||
|
1. **Checkout Code**: The code from the repository is fetched.
|
||||||
|
2. **Set up Docker Buildx**: Prepares the environment for building Docker images.
|
||||||
|
3. **Build and Export Docker image**: The Docker image `portal-satu-peta-backend:latest` is built and exported as a `.tar` file.
|
||||||
|
* Uses cache from GitHub Actions (GHA) to speed up the build process.
|
||||||
|
4. **Copy Docker image to server via SCP**: The `portal-satu-peta-backend.tar` file is copied to the target server (defined by secrets `SSH_HOST`, `SSH_USER`, `SSH_PORT`, `SSH_PASSWORD`).
|
||||||
|
5. **Deploy container**: An SSH script is executed on the target server:
|
||||||
|
* **Load image**: The Docker image from the `.tar` file is loaded into Docker on the server.
|
||||||
|
* **Stop and Remove Old Container**: The old container named `portal-satu-peta-backend` is stopped and removed (if it exists).
|
||||||
|
* **Run New Container**: A new container is run from the newly loaded image:
|
||||||
|
* Container name: `portal-satu-peta-backend`
|
||||||
|
* Restart policy: `unless-stopped`
|
||||||
|
* Environment file: `/home/application/.env` (ensure this file exists and is configured on the server)
|
||||||
|
* Port mapping: `5000:5000` (host port 5000 to container port 5000)
|
||||||
|
* Health check configuration.
|
||||||
|
* **Run Migrations**: The command `docker exec portal-satu-peta-backend python migrations/scripts.py` is executed inside the newly running container to perform database migrations.
|
||||||
|
* **Prune Old Images**: Old, unused Docker images (older than 24 hours) are removed to save disk space.
|
||||||
|
* **Clean Up**: The copied `.tar` file is removed from the server.
|
||||||
|
* **Verify Status**: The status of the `portal-satu-peta-backend` container is verified.
|
||||||
|
|
||||||
|
### Server Requirements for Deployment:
|
||||||
|
|
||||||
|
* Linux server with SSH access.
|
||||||
|
* Docker installed on the server.
|
||||||
|
* An environment file (e.g., `/home/application/.env`) must exist on the server and contain the correct configurations for production (database, secret keys, etc.).
|
||||||
|
* The SSH user used must have permissions to run Docker commands and access the necessary paths.
|
||||||
|
|
||||||
|
### GitHub Secrets Configuration:
|
||||||
|
|
||||||
|
Ensure the following secrets are configured in your GitHub repository (Settings > Secrets and variables > Actions):
|
||||||
|
|
||||||
|
* `SSH_HOST`: IP address or hostname of the deployment server.
|
||||||
|
* `SSH_USER`: Username for SSH login to the server.
|
||||||
|
* `SSH_PORT`: SSH server port (usually 22).
|
||||||
|
* `SSH_PASSWORD`: Password for the SSH user. (Using SSH keys is highly recommended for security).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# local run
|
||||||
|
- pyenv local 3.13.0
|
||||||
|
- poetry env use python
|
||||||
|
- poetry install
|
||||||
|
- poetry install --no-root
|
||||||
|
|
||||||
|
|
||||||
|
# run
|
||||||
|
- poetry run uvicorn app.main:app --reload
|
||||||
106
alembic.ini
Normal file
106
alembic.ini
Normal file
|
|
@ -0,0 +1,106 @@
|
||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = migrations
|
||||||
|
|
||||||
|
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||||
|
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||||
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
|
# for all available tokens
|
||||||
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
|
# defaults to the current working directory.
|
||||||
|
prepend_sys_path = .
|
||||||
|
|
||||||
|
# timezone to use when rendering the date within the migration file
|
||||||
|
# as well as the filename.
|
||||||
|
# If specified, requires the python-dateutil library that can be
|
||||||
|
# installed by adding `alembic[tz]` to the pip requirements
|
||||||
|
# string value is passed to dateutil.tz.gettz()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; This defaults
|
||||||
|
# to alembic/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path.
|
||||||
|
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||||
|
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||||
|
|
||||||
|
# version path separator; As mentioned above, this is the character used to split
|
||||||
|
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||||
|
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||||
|
# Valid values for version_path_separator are:
|
||||||
|
#
|
||||||
|
# version_path_separator = :
|
||||||
|
# version_path_separator = ;
|
||||||
|
# version_path_separator = space
|
||||||
|
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
# NOTE(review): alembic.ini does not expand ${...} placeholders by itself —
# this value only works if migrations/env.py overrides sqlalchemy.url from the
# DATABASE_URL environment variable before the engine is created; confirm.
sqlalchemy.url = ${DATABASE_URL}
|
||||||
|
|
||||||
|
|
||||||
|
[post_write_hooks]
|
||||||
|
# post_write_hooks defines scripts or Python functions that are run
|
||||||
|
# on newly generated revision scripts. See the documentation for further
|
||||||
|
# detail and examples
|
||||||
|
|
||||||
|
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||||
|
# hooks = black
|
||||||
|
# black.type = console_scripts
|
||||||
|
# black.entrypoint = black
|
||||||
|
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
||||||
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
0
app/api/dependencies/__init__.py
Normal file
0
app/api/dependencies/__init__.py
Normal file
100
app/api/dependencies/auth.py
Normal file
100
app/api/dependencies/auth.py
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import Depends, HTTPException, Request, status
|
||||||
|
from fastapi.security import OAuth2PasswordBearer
|
||||||
|
from fastapi.security.utils import get_authorization_scheme_param
|
||||||
|
from jose import JWTError
|
||||||
|
from pydantic import ValidationError
|
||||||
|
from pytz import timezone
|
||||||
|
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.security import decode_token
|
||||||
|
from app.models import UserModel
|
||||||
|
from app.schemas.token_schema import TokenPayload
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services import UserService
|
||||||
|
|
||||||
|
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/login")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(
    token: str = Depends(oauth2_scheme), user_service: UserService = Depends(Factory().get_user_service)
) -> UserModel:
    """Resolve the bearer token to its owning user.

    Raises 401 when the token is malformed, not an access token, or does not
    map to an existing user; raises 400 when the user exists but is disabled.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        claims = TokenPayload(**decode_token(token))
    except (JWTError, ValidationError):
        raise unauthorized

    # Only access tokens authenticate requests; refresh tokens are rejected,
    # and a token without a subject is useless.
    if claims.type != "access" or claims.sub is None:
        raise unauthorized

    user = await user_service.find_by_id(claims.sub)
    if user is None:
        raise unauthorized

    if not user.is_active:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Inactive user")

    return user
|
||||||
|
|
||||||
|
|
||||||
|
async def get_payload(request: Request, user_service: UserService = Depends(Factory().get_user_service)):
    """Best-effort authentication: return the user for a valid bearer token, else None.

    Unlike get_current_user this never raises — missing, malformed, or stale
    credentials all yield None so routes can also serve anonymous callers.
    """
    header = request.headers.get("Authorization")
    if not header:
        return None

    scheme, token = get_authorization_scheme_param(header)
    if not token or scheme.lower() != "bearer":
        return None

    try:
        claims = TokenPayload(**decode_token(token))
        if claims.sub is None:
            return None
        # find_by_id returns None for unknown ids, which is exactly what we
        # want to hand back for an unresolvable token.
        return await user_service.find_by_id(claims.sub)
    except (JWTError, ValidationError):
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_active_user(current_user: UserModel = Depends(get_current_user)) -> UserModel:
    """Reject callers that are soft-deleted or deactivated; pass everyone else through."""
    if current_user.is_deleted or not current_user.is_active:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Inactive user")
    return current_user
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_active_admin(current_user: UserSchema = Depends(get_current_active_user)) -> UserModel:
    """Allow only active users whose role name is exactly 'administrator'."""
    role = current_user.role
    if role is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Unauthorized")
    if role.name != "administrator":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Your role does not have access to this resource"
        )
    return current_user
|
||||||
0
app/api/dependencies/database.py
Normal file
0
app/api/dependencies/database.py
Normal file
165
app/api/dependencies/factory.py
Normal file
165
app/api/dependencies/factory.py
Normal file
|
|
@ -0,0 +1,165 @@
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from app.core.minio_client import MinioClient
|
||||||
|
from app.models import (
|
||||||
|
CategoryModel,
|
||||||
|
ClassificationModel,
|
||||||
|
CredentialModel,
|
||||||
|
FeedbackModel,
|
||||||
|
FileModel,
|
||||||
|
MapProjectionSystemModel,
|
||||||
|
MapsetHistoryModel,
|
||||||
|
MapsetModel,
|
||||||
|
MapSourceModel,
|
||||||
|
NewsModel,
|
||||||
|
OrganizationModel,
|
||||||
|
RefreshTokenModel,
|
||||||
|
RegionalModel,
|
||||||
|
RoleModel,
|
||||||
|
SourceUsageModel,
|
||||||
|
UserModel,
|
||||||
|
)
|
||||||
|
from app.repositories import (
|
||||||
|
CategoryRepository,
|
||||||
|
ClassificationRepository,
|
||||||
|
CredentialRepository,
|
||||||
|
FeedbackRepository,
|
||||||
|
FileRepository,
|
||||||
|
MapProjectionSystemRepository,
|
||||||
|
MapsetHistoryRepository,
|
||||||
|
MapsetRepository,
|
||||||
|
MapSourceRepository,
|
||||||
|
NewsRepository,
|
||||||
|
OrganizationRepository,
|
||||||
|
RegionalRepository,
|
||||||
|
RoleRepository,
|
||||||
|
SourceUsageRepository,
|
||||||
|
TokenRepository,
|
||||||
|
UserRepository,
|
||||||
|
)
|
||||||
|
from app.services import (
|
||||||
|
AuthService,
|
||||||
|
CategoryService,
|
||||||
|
CountService,
|
||||||
|
ClassificationService,
|
||||||
|
CredentialService,
|
||||||
|
FeedbackService,
|
||||||
|
FileService,
|
||||||
|
MapProjectionSystemService,
|
||||||
|
MapsetHistoryService,
|
||||||
|
MapsetService,
|
||||||
|
MapSourceService,
|
||||||
|
NewsService,
|
||||||
|
OrganizationService,
|
||||||
|
RegionalService,
|
||||||
|
RoleService,
|
||||||
|
UserService,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Factory:
    """Dependency-injection wiring for the API layer.

    Each ``*_repository`` attribute is a ``staticmethod(partial(...))`` so a
    call produces a fresh repository bound to its model class. The
    ``get_*_service`` methods are meant to be used as FastAPI ``Depends``
    providers and assemble services from those repositories.
    """

    organization_repository = staticmethod(partial(OrganizationRepository, OrganizationModel, MapsetModel))
    role_repository = staticmethod(partial(RoleRepository, RoleModel))
    user_repository = staticmethod(partial(UserRepository, UserModel))
    token_repository = staticmethod(partial(TokenRepository, RefreshTokenModel))
    news_repository = staticmethod(partial(NewsRepository, NewsModel))
    file_repository = staticmethod(partial(FileRepository, FileModel))
    credential_repository = staticmethod(partial(CredentialRepository, CredentialModel))
    map_source_repository = staticmethod(partial(MapSourceRepository, MapSourceModel))
    map_projection_system_repository = staticmethod(partial(MapProjectionSystemRepository, MapProjectionSystemModel))
    category_repository = staticmethod(partial(CategoryRepository, CategoryModel))
    classification_repository = staticmethod(partial(ClassificationRepository, ClassificationModel))
    regional_repository = staticmethod(partial(RegionalRepository, RegionalModel))
    mapset_repository = staticmethod(partial(MapsetRepository, MapsetModel))
    mapset_history_repository = staticmethod(partial(MapsetHistoryRepository, MapsetHistoryModel))
    map_source_usage_repository = staticmethod(partial(SourceUsageRepository, SourceUsageModel))
    feedback_repository = staticmethod(partial(FeedbackRepository, FeedbackModel))

    def get_auth_service(self):
        # Auth needs both user lookup and refresh-token persistence.
        return AuthService(
            user_repository=self.user_repository(),
            token_repository=self.token_repository(),
        )

    def get_organization_service(self):
        return OrganizationService(self.organization_repository())

    def get_role_service(self):
        return RoleService(self.role_repository())

    def get_user_service(self):
        return UserService(self.user_repository(), self.role_repository())

    def get_news_service(self):
        return NewsService(self.news_repository())

    def get_file_service(self):
        # FileService also talks to object storage through a MinIO client.
        return FileService(self.file_repository(), MinioClient())

    def get_credential_service(self):
        return CredentialService(self.credential_repository())

    def get_map_source_service(self):
        return MapSourceService(self.map_source_repository())

    def get_map_projection_system_service(self):
        return MapProjectionSystemService(self.map_projection_system_repository())

    def get_category_service(self):
        return CategoryService(self.category_repository())

    def get_classification_service(self):
        return ClassificationService(self.classification_repository())

    def get_regional_service(self):
        return RegionalService(self.regional_repository())

    def get_mapset_service(self):
        # Mapsets aggregate history, source-usage tracking and file handling.
        return MapsetService(
            self.mapset_repository(),
            self.mapset_history_repository(),
            self.map_source_usage_repository(),
            self.get_file_service(),
        )

    def get_mapset_history_service(self):
        return MapsetHistoryService(self.mapset_history_repository())

    def get_feedback_service(self):
        return FeedbackService(self.feedback_repository())

    def get_count_service(self):
        # Aggregated counts need no repositories at present.
        return CountService()
|
||||||
40
app/api/v1/__init__.py
Normal file
40
app/api/v1/__init__.py
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from app.api.v1.routes import (
|
||||||
|
auth_router,
|
||||||
|
category_router,
|
||||||
|
count_router,
|
||||||
|
classification_router,
|
||||||
|
credential_router,
|
||||||
|
feedback_router,
|
||||||
|
file_router,
|
||||||
|
geonetwork_router,
|
||||||
|
map_projection_system_router,
|
||||||
|
map_source_router,
|
||||||
|
mapset_history_router,
|
||||||
|
mapset_router,
|
||||||
|
news_router,
|
||||||
|
organization_router,
|
||||||
|
regional_router,
|
||||||
|
role_router,
|
||||||
|
user_router,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Aggregated v1 API router: every feature router is mounted here so the
# application only needs to include this one router. Tags group the
# endpoints in the generated OpenAPI / Swagger UI.
router = APIRouter()
router.include_router(auth_router, tags=["Auth"])
router.include_router(category_router, tags=["Categories"])
router.include_router(classification_router, tags=["Classifications"])
router.include_router(credential_router, tags=["Credentials"])
router.include_router(feedback_router, tags=["Feedback"])
router.include_router(file_router, tags=["Files"])
router.include_router(geonetwork_router, tags=["GeoNetwork"])
router.include_router(count_router, tags=["Counts"])
router.include_router(organization_router, tags=["Organizations"])
router.include_router(map_source_router, tags=["Map Sources"])
router.include_router(map_projection_system_router, tags=["Map Projection Systems"])
router.include_router(mapset_router, tags=["Mapsets"])
router.include_router(mapset_history_router, tags=["Mapset Histories"])
router.include_router(news_router, tags=["News"])
router.include_router(regional_router, tags=["Regionals"])
router.include_router(role_router, tags=["Roles"])
router.include_router(user_router, tags=["Users"])
|
||||||
37
app/api/v1/routes/__init__.py
Normal file
37
app/api/v1/routes/__init__.py
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
from .auth_route import router as auth_router
|
||||||
|
from .category_route import router as category_router
|
||||||
|
from .classification_route import router as classification_router
|
||||||
|
from .credential_route import router as credential_router
|
||||||
|
from .feedback_route import router as feedback_router
|
||||||
|
from .file_route import router as file_router
|
||||||
|
from .count_route import router as count_router
|
||||||
|
from .geonetwork_route import router as geonetwork_router
|
||||||
|
from .map_projection_system_route import router as map_projection_system_router
|
||||||
|
from .map_source_route import router as map_source_router
|
||||||
|
from .mapset_history_route import router as mapset_history_router
|
||||||
|
from .mapset_route import router as mapset_router
|
||||||
|
from .news_route import router as news_router
|
||||||
|
from .organization_route import router as organization_router
|
||||||
|
from .regional_route import router as regional_router
|
||||||
|
from .role_route import router as role_router
|
||||||
|
from .user_route import router as user_router
|
||||||
|
|
||||||
|
# Public re-exports: keeps `from app.api.v1.routes import <x>_router` stable
# regardless of how the underlying route modules are laid out.
__all__ = [
    "organization_router",
    "role_router",
    "user_router",
    "auth_router",
    "news_router",
    "file_router",
    "credential_router",
    "map_source_router",
    "map_projection_system_router",
    "category_router",
    "regional_router",
    "mapset_router",
    "classification_router",
    "mapset_history_router",
    "feedback_router",
    "geonetwork_router",
    "count_router",
]
|
||||||
44
app/api/v1/routes/auth_route.py
Normal file
44
app/api/v1/routes/auth_route.py
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from fastapi.security import OAuth2PasswordRequestForm
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.schemas.token_schema import RefreshTokenSchema, Token
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services.auth_service import AuthService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/auth/login", response_model=Token)
|
||||||
|
async def login(
|
||||||
|
form_data: OAuth2PasswordRequestForm = Depends(), auth_service: AuthService = Depends(Factory().get_auth_service)
|
||||||
|
):
|
||||||
|
user = await auth_service.authenticate_user(form_data.username, form_data.password)
|
||||||
|
if not user:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Incorrect username or password",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
return await auth_service.create_tokens(user.id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/auth/logout")
|
||||||
|
async def logout(
|
||||||
|
current_user: UserSchema = Depends(get_current_active_user),
|
||||||
|
auth_service: AuthService = Depends(Factory().get_auth_service),
|
||||||
|
):
|
||||||
|
await auth_service.logout(str(current_user.id))
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/auth/refresh", response_model=Token)
|
||||||
|
async def refresh_token(
|
||||||
|
refresh_token: RefreshTokenSchema, auth_service: AuthService = Depends(Factory().get_auth_service)
|
||||||
|
):
|
||||||
|
return await auth_service.refresh_token(refresh_token.refresh_token)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/me", response_model=UserSchema)
|
||||||
|
async def read_users_me(current_user: UserSchema = Depends(get_current_active_user)):
|
||||||
|
return current_user
|
||||||
72
app/api/v1/routes/category_route.py
Normal file
72
app/api/v1/routes/category_route.py
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.category_schema import (
|
||||||
|
CategoryCreateSchema,
|
||||||
|
CategorySchema,
|
||||||
|
CategoryUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.services import CategoryService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/categories", response_model=PaginatedResponse[CategorySchema])
|
||||||
|
async def get_categorys(
|
||||||
|
params: CommonParams = Depends(), service: CategoryService = Depends(Factory().get_category_service)
|
||||||
|
):
|
||||||
|
filter = params.filter
|
||||||
|
sort = params.sort
|
||||||
|
search = params.search
|
||||||
|
group_by = params.group_by
|
||||||
|
limit = params.limit
|
||||||
|
offset = params.offset
|
||||||
|
categorys, total = await service.find_all(filter, sort, search, group_by, limit, offset)
|
||||||
|
|
||||||
|
return PaginatedResponse(
|
||||||
|
items=[CategorySchema.model_validate(category) for category in categorys],
|
||||||
|
total=total,
|
||||||
|
limit=limit,
|
||||||
|
offset=offset,
|
||||||
|
has_more=total > (offset + limit),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/categories/{id}", response_model=CategorySchema)
|
||||||
|
async def get_category(id: UUID7Field, service: CategoryService = Depends(Factory().get_category_service)):
|
||||||
|
category = await service.find_by_id(id)
|
||||||
|
return category
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/categories",
    response_model=CategorySchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_category(
    data: CategoryCreateSchema, service: CategoryService = Depends(Factory().get_category_service)
):
    """Create a category; requires an authenticated active user."""
    # model_dump() is the Pydantic v2 API; .dict() is deprecated there, and
    # this module already relies on v2 (model_validate in the list endpoint).
    category = await service.create(data.model_dump())
    return category
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/categories/{id}", response_model=CategorySchema, dependencies=[Depends(get_current_active_user)])
|
||||||
|
async def update_category(
|
||||||
|
id: UUID7Field,
|
||||||
|
data: CategoryUpdateSchema,
|
||||||
|
service: CategoryService = Depends(Factory().get_category_service),
|
||||||
|
):
|
||||||
|
category = await service.update(id, data.dict(exclude_unset=True))
|
||||||
|
return category
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/categories/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_category(id: UUID7Field, service: CategoryService = Depends(Factory().get_category_service)):
    # Requires an authenticated active user; responds 204 with no body.
    await service.delete(id)
|
||||||
78
app/api/v1/routes/classification_route.py
Normal file
78
app/api/v1/routes/classification_route.py
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas import (
|
||||||
|
ClassificationCreateSchema,
|
||||||
|
ClassificationSchema,
|
||||||
|
ClassificationUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.services import ClassificationService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/classifications", response_model=PaginatedResponse[ClassificationSchema])
|
||||||
|
async def get_classifications(
|
||||||
|
params: CommonParams = Depends(), service: ClassificationService = Depends(Factory().get_classification_service)
|
||||||
|
):
|
||||||
|
filter = params.filter
|
||||||
|
sort = params.sort
|
||||||
|
search = params.search
|
||||||
|
group_by = params.group_by
|
||||||
|
limit = params.limit
|
||||||
|
offset = params.offset
|
||||||
|
classifications, total = await service.find_all(filter, sort, search, group_by, limit, offset)
|
||||||
|
|
||||||
|
return PaginatedResponse(
|
||||||
|
items=[ClassificationSchema.model_validate(classification) for classification in classifications],
|
||||||
|
total=total,
|
||||||
|
limit=limit,
|
||||||
|
offset=offset,
|
||||||
|
has_more=total > (offset + limit),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/classifications/{id}", response_model=ClassificationSchema)
|
||||||
|
async def get_classification(
|
||||||
|
id: UUID7Field, service: ClassificationService = Depends(Factory().get_classification_service)
|
||||||
|
):
|
||||||
|
classification = await service.find_by_id(id)
|
||||||
|
return classification
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/classifications",
    response_model=ClassificationSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_classification(
    data: ClassificationCreateSchema, service: ClassificationService = Depends(Factory().get_classification_service)
):
    """Create a classification; requires an authenticated active user."""
    # model_dump() is the Pydantic v2 API; .dict() is deprecated there, and
    # this module already relies on v2 (model_validate in the list endpoint).
    classification = await service.create(data.model_dump())
    return classification
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/classifications/{id}", response_model=ClassificationSchema, dependencies=[Depends(get_current_active_user)]
)
async def update_classification(
    id: UUID7Field,
    data: ClassificationUpdateSchema,
    service: ClassificationService = Depends(Factory().get_classification_service),
):
    """Partially update a classification; only fields present in the request are applied."""
    # Pydantic v2: model_dump() replaces the deprecated .dict();
    # exclude_unset keeps PATCH semantics (untouched fields stay as-is).
    classification = await service.update(id, data.model_dump(exclude_unset=True))
    return classification
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/classifications/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_classification(
    id: UUID7Field, service: ClassificationService = Depends(Factory().get_classification_service)
):
    # Requires an authenticated active user; responds 204 with no body.
    await service.delete(id)
|
||||||
14
app/api/v1/routes/count_route.py
Normal file
14
app/api/v1/routes/count_route.py
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.schemas.count_schema import CountSchema
|
||||||
|
from app.services.count_service import CountService
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/count", response_model=CountSchema)
|
||||||
|
async def get_counts(service: CountService = Depends(Factory().get_count_service)):
|
||||||
|
data = await service.get_counts()
|
||||||
|
return CountSchema(**data)
|
||||||
221
app/api/v1/routes/credential_route.py
Normal file
221
app/api/v1/routes/credential_route.py
Normal file
|
|
@ -0,0 +1,221 @@
|
||||||
|
from typing import Optional
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Path, Query, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_admin, get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.credential_schema import (
|
||||||
|
CredentialCreateSchema,
|
||||||
|
CredentialSchema,
|
||||||
|
CredentialUpdateSchema,
|
||||||
|
CredentialWithSensitiveDataSchema,
|
||||||
|
)
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services import CredentialService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/credentials",
    summary="Buat kredensial baru",
    status_code=status.HTTP_201_CREATED,
)
async def create_credential(
    data: CredentialCreateSchema,
    current_user: UserSchema = Depends(get_current_active_admin),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """
    Create a new credential whose sensitive payload is stored encrypted.

    Admin-only (enforced by the ``get_current_active_admin`` dependency).

    **Supported sensitive data per credential type:**

    - Database:
        - host, port, username, password, database_name
    - MinIO:
        - endpoint, access_key, secret_key, secure, bucket_name
    - API:
        - base_url, api_key
    - SSH:
        - host, port, username, password (or private_key)
    - SMTP:
        - host, port, username, password, use_tls
    - FTP:
        - host, port, username, password
    """
    # The service layer encrypts sensitive_data before persisting; the creator
    # is recorded via user_id.
    result = await service.create_credential(
        name=data.name,
        credential_type=data.credential_type,
        sensitive_data=data.sensitive_data,
        credential_metadata=data.credential_metadata,
        description=data.description,
        is_default=data.is_default,
        user_id=current_user.id,
    )
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/credentials",
    response_model=PaginatedResponse[CredentialSchema],
    summary="Dapatkan daftar kredensial",
    dependencies=[Depends(get_current_active_user)],
)
async def get_credentials(
    credential_type: Optional[str] = Query(None, description="Filter berdasarkan tipe kredensial"),
    include_inactive: bool = Query(False, description="Sertakan kredensial yang tidak aktif"),
    params: CommonParams = Depends(),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """
    List credentials with filtering and pagination.

    By default only active credentials are returned; pass
    ``include_inactive=true`` to see deactivated ones as well.
    """
    # Copy before appending: the original aliased params.filter and mutated
    # the shared list in place.
    filter_params = list(params.filter or [])

    if credential_type:
        filter_params.append(f"credential_type={credential_type}")

    if not include_inactive:
        # Plain string — no placeholders, so no f-string needed (F541).
        filter_params.append("is_active=true")

    credentials, total = await service.find_all(
        filters=filter_params,
        sort=params.sort,
        search=params.search,
        group_by=params.group_by,
        limit=params.limit,
        offset=params.offset,
    )

    return PaginatedResponse(
        # list(...) replaces the original identity comprehension.
        items=list(credentials),
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=params.offset + params.limit < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/credentials/{credential_id}",
    response_model=CredentialSchema,
    summary="Dapatkan detail kredensial",
    dependencies=[Depends(get_current_active_user)],
)
async def get_credential(
    credential_id: UUID = Path(..., description="ID kredensial"),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """
    Return a single credential WITHOUT its decrypted sensitive data.

    The previous summary/docstring (copy-pasted) claimed decrypted data was
    returned; that is handled by ``GET /credentials/{credential_id}/decrypted``.
    """
    credential = await service.find_by_id(credential_id)

    return credential
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/credentials/{credential_id}/decrypted",
    response_model=CredentialWithSensitiveDataSchema,
    summary="Dapatkan kredensial dengan data terdekripsi",
    dependencies=[Depends(get_current_active_user)],
)
async def get_credential_decrypted(
    credential_id: UUID = Path(..., description="ID kredensial"),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """
    Return a credential with its sensitive data decrypted.

    NOTE(review): the original docstring said "admin only", but the route
    dependency is ``get_current_active_user`` (any active user) — confirm
    the intended access level before exposing decrypted secrets.
    """
    credential = await service.get_credential_with_decrypted_data(credential_id)

    return credential
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/credentials/{credential_id}",
    summary="Update kredensial",
)
async def update_credential(
    credential_id: UUID,
    data: CredentialUpdateSchema,
    current_user: UserSchema = Depends(get_current_active_admin),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """
    Partially update a credential (admin only via ``get_current_active_admin``).

    Sensitive data may be updated partially: fields omitted from
    ``data.sensitive_data`` are left untouched.
    """
    # Pydantic v2: .dict() is deprecated in favour of .model_dump()
    # (this codebase already uses the v2 model_validate API elsewhere).
    updated = await service.update_credential(
        credential_id, data.model_dump(exclude_unset=True), current_user.id
    )
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/credentials/{credential_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_credential(
    credential_id: UUID,
    user: UserSchema = Depends(get_current_active_admin),
    service: CredentialService = Depends(Factory().get_credential_service),
):
    """Delete a credential (admin only); returns 204 with no body."""
    await service.delete(user, credential_id)
|
||||||
|
|
||||||
|
|
||||||
|
# @router.post(
|
||||||
|
# "/{credential_id}/test",
|
||||||
|
# response_model=CredentialTestResult,
|
||||||
|
# summary="Test koneksi menggunakan kredensial"
|
||||||
|
# )
|
||||||
|
# async def test_credential(
|
||||||
|
# credential_id: UUID,
|
||||||
|
# current_user: UserSchema = Depends(get_current_active_user),
|
||||||
|
# service: CredentialService = Depends(Factory().get_credential_service)
|
||||||
|
# ):
|
||||||
|
# """
|
||||||
|
# Test koneksi menggunakan kredensial.
|
||||||
|
|
||||||
|
# Hasil test berisi flag sukses dan detail tambahan.
|
||||||
|
# """
|
||||||
|
# result = await service.test_credential(
|
||||||
|
# service.db, credential_id, current_user.id
|
||||||
|
# )
|
||||||
|
# return result
|
||||||
|
|
||||||
|
|
||||||
|
# @router.delete(
|
||||||
|
# "/{credential_id}",
|
||||||
|
# status_code=status.HTTP_204_NO_CONTENT,
|
||||||
|
# summary="Hapus kredensial"
|
||||||
|
# )
|
||||||
|
# async def delete_credential(
|
||||||
|
# credential_id: UUID,
|
||||||
|
# current_user: UserSchema = Depends(get_current_admin_user),
|
||||||
|
# service: CredentialService = Depends(Factory().get_credential_service)
|
||||||
|
# ):
|
||||||
|
# """
|
||||||
|
# Hapus kredensial secara permanen.
|
||||||
|
|
||||||
|
# Endpoint ini hanya dapat diakses oleh admin.
|
||||||
|
# """
|
||||||
|
# credential = await service.find_by_id(service.db, credential_id)
|
||||||
|
# if not credential:
|
||||||
|
# raise HTTPException(
|
||||||
|
# status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
# detail="Credential not found"
|
||||||
|
# )
|
||||||
|
|
||||||
|
# await service.delete(service.db, credential_id)
|
||||||
|
|
||||||
|
# return None
|
||||||
78
app/api/v1/routes/feedback_route.py
Normal file
78
app/api/v1/routes/feedback_route.py
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.feedback_schema import (
|
||||||
|
FeedbackCreateSchema,
|
||||||
|
FeedbackSchema,
|
||||||
|
FeedbackUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.services import FeedbackService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/feedback", response_model=PaginatedResponse[FeedbackSchema])
async def get_feedbacks(
    params: CommonParams = Depends(),
    service: FeedbackService = Depends(Factory().get_feedback_service),
    user=Depends(get_current_active_user)
):
    """Return a paginated list of feedback entries (authenticated users only)."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[FeedbackSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/feedback/{id}", response_model=FeedbackSchema)
async def get_feedback(
    # NOTE(review): feedback ids are plain ints here while other resources use
    # UUID7Field — confirm the feedback table really has integer PKs.
    id: int,
    service: FeedbackService = Depends(Factory().get_feedback_service),
    user=Depends(get_current_active_user)
):
    """Return a single feedback entry by its id."""
    feedback = await service.find_by_id(id)
    return feedback
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/feedback", response_model=FeedbackSchema, status_code=status.HTTP_201_CREATED)
async def create_feedback(
    data: FeedbackCreateSchema, service: FeedbackService = Depends(Factory().get_feedback_service)
):
    """
    Create a feedback entry.

    NOTE(review): unlike the other feedback routes this one has no auth
    dependency — presumably feedback submission is public; confirm.
    """
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    feedback = await service.create(data.model_dump())
    return feedback
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/feedback/{id}", response_model=FeedbackSchema)
async def update_feedback(
    id: int,
    data: FeedbackUpdateSchema,
    service: FeedbackService = Depends(Factory().get_feedback_service),
    user=Depends(get_current_active_user)
):
    """Partially update a feedback entry; unset fields are left untouched."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    feedback = await service.update(id, data.model_dump(exclude_unset=True))
    return feedback
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/feedback/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_feedback(
    id: int,
    service: FeedbackService = Depends(Factory().get_feedback_service),
    user=Depends(get_current_active_user)
):
    """Delete a feedback entry by id; returns 204 with no body."""
    await service.delete(id)
|
||||||
98
app/api/v1/routes/file_route.py
Normal file
98
app/api/v1/routes/file_route.py
Normal file
|
|
@ -0,0 +1,98 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import (
|
||||||
|
APIRouter,
|
||||||
|
Depends,
|
||||||
|
File,
|
||||||
|
Form,
|
||||||
|
HTTPException,
|
||||||
|
Response,
|
||||||
|
UploadFile,
|
||||||
|
status,
|
||||||
|
)
|
||||||
|
from fastapi.responses import StreamingResponse
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.models import UserModel
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.file_schema import FileSchema
|
||||||
|
from app.services import FileService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/files", response_model=PaginatedResponse[FileSchema], dependencies=[Depends(get_current_active_user)])
async def get_files(params: CommonParams = Depends(), service: FileService = Depends(Factory().get_file_service)):
    """Return a paginated list of file metadata records."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[FileSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/files/{id}", response_model=FileSchema)
async def get_file(id: UUID7Field, service: FileService = Depends(Factory().get_file_service)):
    """
    Return file metadata by id, or 404 when it does not exist.

    NOTE(review): this route and ``get_file_info`` both register
    ``GET /files/{...}``; FastAPI dispatches to whichever was registered
    first, so one of them is unreachable — consider consolidating.
    """
    file = await service.find_by_id(id)
    if not file:
        # Mirror get_file_info: a missing id should be a 404, not a 500 from
        # response-model validation of None.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File tidak ditemukan")
    return file
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/files", response_model=FileSchema, status_code=status.HTTP_201_CREATED)
async def upload_file(
    file: UploadFile = File(...),
    description: Optional[str] = Form(None),
    current_user: UserModel = Depends(get_current_active_user),
    service: FileService = Depends(Factory().get_file_service),
):
    """Upload a file (multipart form) on behalf of the authenticated user."""
    result = await service.upload_file(file=file, description=description, user_id=current_user.id)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/files/{file_id}", response_model=FileSchema, summary="Dapatkan metadata file")
async def get_file_info(file_id: UUID7Field, service: FileService = Depends(Factory().get_file_service)):
    """
    Return file metadata, raising 404 when the id is unknown.

    NOTE(review): ``GET /files/{id}`` is registered earlier in this file
    with the same path shape, so FastAPI routes matching requests there and
    this handler is effectively unreachable — consolidate the two routes.
    """
    file = await service.find_by_id(file_id)
    if not file:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File tidak ditemukan")
    return file
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/files/{file_id}/download", summary="Download file")
async def download_file(file_id: UUID7Field, service: FileService = Depends(Factory().get_file_service)):
    """Stream a stored file back to the client as an attachment download."""
    # file_content appears to be an async response object exposing a
    # ``.content`` stream and a ``.release()`` method (MinIO/aiohttp-style)
    # — TODO confirm against FileService.get_file_content.
    file_content, object_info, file_model = await service.get_file_content(file_id)

    async def iterfile():
        # Read in 8 KiB chunks; the finally block releases the underlying
        # connection even if the client disconnects mid-stream.
        try:
            chunk = await file_content.content.read(8192)
            while chunk:
                yield chunk
                chunk = await file_content.content.read(8192)
        finally:
            await file_content.release()

    return StreamingResponse(
        iterfile(),
        media_type=file_model.content_type,
        headers={"Content-Disposition": f'attachment; filename="{file_model.filename}"'},
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/files/{file_id}", status_code=status.HTTP_204_NO_CONTENT, summary="Hapus file")
async def delete_file(
    file_id: UUID7Field,
    current_user: UserModel = Depends(get_current_active_user),
    service: FileService = Depends(Factory().get_file_service),
):
    """Delete both the stored object and its metadata record; returns 204."""
    await service.delete_file_with_content(file_id, str(current_user.id))
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||||
65
app/api/v1/routes/geonetwork_route.py
Normal file
65
app/api/v1/routes/geonetwork_route.py
Normal file
|
|
@ -0,0 +1,65 @@
|
||||||
|
from fastapi import APIRouter, HTTPException, status
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/geonetwork-record")
async def get_geonetwork_record():
    """
    Proxy endpoint that fetches aggregate record counts from the GeoNetwork
    Jatim API.

    Sends a fixed POST query (no hits, only a ``resourceType`` terms
    aggregation over non-template records) and relays the JSON response.
    Upstream failures map to 504 (timeout), the upstream status code
    (HTTP error), or 500 (anything else).
    """
    # NOTE(review): this URL duplicates the GEONETWORK_API_URL value in
    # .env.example — consider reading it from configuration instead.
    geonetwork_url = "https://geonetwork.jatimprov.go.id/geonetwork/srv/api/search/records/_search"

    # Request body forwarded to the GeoNetwork Elasticsearch-style API.
    request_body = {
        "size": 0,  # no individual hits, aggregations only
        "track_total_hits": True,
        "query": {
            "bool": {
                "must": {
                    "query_string": {
                        "query": "+isTemplate:n"
                    }
                }
            }
        },
        "aggs": {
            "resourceType": {
                "terms": {
                    "field": "resourceType",
                    "size": 10
                }
            }
        }
    }

    try:
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                geonetwork_url,
                json=request_body,
                headers={
                    "Content-Type": "application/json",
                    "Accept": "application/json"
                }
            )
            response.raise_for_status()
            return response.json()

    except httpx.TimeoutException:
        raise HTTPException(
            status_code=status.HTTP_504_GATEWAY_TIMEOUT,
            detail="Request to GeoNetwork API timed out"
        )
    except httpx.HTTPStatusError as e:
        # Propagate the upstream status code so clients see the real error.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"GeoNetwork API returned error: {e.response.text}"
        )
    except Exception as e:
        # Broad catch is acceptable at this proxy boundary; the detail string
        # surfaces the underlying connection problem.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error connecting to GeoNetwork API: {str(e)}"
        )
|
||||||
87
app/api/v1/routes/map_projection_system_route.py
Normal file
87
app/api/v1/routes/map_projection_system_route.py
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.map_projection_system_schema import (
|
||||||
|
MapProjectionSystemCreateSchema,
|
||||||
|
MapProjectionSystemSchema,
|
||||||
|
MapProjectionSystemUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.services import MapProjectionSystemService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/map_projection_systems", response_model=PaginatedResponse[MapProjectionSystemSchema])
async def get_map_projection_systems(
    params: CommonParams = Depends(),
    service: MapProjectionSystemService = Depends(Factory().get_map_projection_system_service),
):
    """Return a paginated list of map projection systems."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[MapProjectionSystemSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/map_projection_systems/{id}", response_model=MapProjectionSystemSchema)
async def get_map_projection_system(
    id: UUID7Field, service: MapProjectionSystemService = Depends(Factory().get_map_projection_system_service)
):
    """Return a single map projection system by id."""
    map_projection_system = await service.find_by_id(id)
    return map_projection_system
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/map_projection_systems",
    response_model=MapProjectionSystemSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_map_projection_system(
    data: MapProjectionSystemCreateSchema,
    service: MapProjectionSystemService = Depends(Factory().get_map_projection_system_service),
):
    """Create a map projection system (authenticated users only)."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    map_projection_system = await service.create(data.model_dump())
    return map_projection_system
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/map_projection_systems/{id}",
    response_model=MapProjectionSystemSchema,
    dependencies=[Depends(get_current_active_user)],
)
async def update_map_projection_system(
    id: UUID7Field,
    data: MapProjectionSystemUpdateSchema,
    service: MapProjectionSystemService = Depends(Factory().get_map_projection_system_service),
):
    """Partially update a map projection system; unset fields are untouched."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    map_projection_system = await service.update(id, data.model_dump(exclude_unset=True))
    return map_projection_system
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/map_projection_systems/{id}",
    status_code=status.HTTP_204_NO_CONTENT,
    dependencies=[Depends(get_current_active_user)],
)
async def delete_map_projection_system(
    id: UUID7Field, service: MapProjectionSystemService = Depends(Factory().get_map_projection_system_service)
):
    """Delete a map projection system by id; returns 204 with no body."""
    await service.delete(id)
|
||||||
72
app/api/v1/routes/map_source_route.py
Normal file
72
app/api/v1/routes/map_source_route.py
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.map_source_schema import (
|
||||||
|
MapSourceCreateSchema,
|
||||||
|
MapSourceSchema,
|
||||||
|
MapSourceUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.services import MapSourceService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/map_sources", response_model=PaginatedResponse[MapSourceSchema])
async def get_mapSources(
    params: CommonParams = Depends(), service: MapSourceService = Depends(Factory().get_map_source_service)
):
    """Return a paginated list of map sources."""
    map_sources, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[MapSourceSchema.model_validate(source) for source in map_sources],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/map_sources/{id}", response_model=MapSourceSchema)
async def get_mapSource(id: UUID7Field, service: MapSourceService = Depends(Factory().get_map_source_service)):
    """Return a single map source by id."""
    return await service.find_by_id(id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/map_sources",
    response_model=MapSourceSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_mapSource(
    data: MapSourceCreateSchema, service: MapSourceService = Depends(Factory().get_map_source_service)
):
    """Create a map source (authenticated users only)."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    map_source = await service.create(data.model_dump())
    return map_source
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/map_sources/{id}", response_model=MapSourceSchema, dependencies=[Depends(get_current_active_user)])
async def update_mapSource(
    id: UUID7Field,
    data: MapSourceUpdateSchema,
    service: MapSourceService = Depends(Factory().get_map_source_service),
):
    """Partially update a map source; unset fields are left untouched."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    map_source = await service.update(id, data.model_dump(exclude_unset=True))
    return map_source
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/map_sources/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_mapSource(id: UUID7Field, service: MapSourceService = Depends(Factory().get_map_source_service)):
    """Delete a map source by id; returns 204 with no body."""
    await service.delete(id)
|
||||||
55
app/api/v1/routes/mapset_history_route.py
Normal file
55
app/api/v1/routes/mapset_history_route.py
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas import MapsetHistoryCreateSchema, MapsetHistorySchema
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services import MapsetHistoryService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/histories",
    response_model=PaginatedResponse[MapsetHistorySchema],
    dependencies=[Depends(get_current_active_user)],
)
async def get_mapset_histories(
    params: CommonParams = Depends(), service: MapsetHistoryService = Depends(Factory().get_mapset_history_service)
):
    """Return a paginated list of mapset history entries."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[MapsetHistorySchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/histories", response_model=MapsetHistorySchema, status_code=status.HTTP_201_CREATED)
async def record_history(
    data: MapsetHistoryCreateSchema,
    user: UserSchema = Depends(get_current_active_user),
    service: MapsetHistoryService = Depends(Factory().get_mapset_history_service),
):
    """Record a mapset history entry attributed to the authenticated user."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    return await service.create(user, data.model_dump())
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/histories/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_history(
    id: UUID7Field, service: MapsetHistoryService = Depends(Factory().get_mapset_history_service)
):
    """Delete a mapset history entry by id; returns 204 with no body."""
    await service.delete(id)
|
||||||
136
app/api/v1/routes/mapset_route.py
Normal file
136
app/api/v1/routes/mapset_route.py
Normal file
|
|
@ -0,0 +1,136 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user, get_payload
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.mapset_schema import (
|
||||||
|
MapsetByOrganizationSchema,
|
||||||
|
MapsetCreateSchema,
|
||||||
|
MapsetSchema,
|
||||||
|
MapsetUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services import MapsetService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/mapsets", response_model=PaginatedResponse[MapsetSchema])
async def get_mapsets(
    params: CommonParams = Depends(),
    user: UserSchema = Depends(get_payload),
    service: MapsetService = Depends(Factory().get_mapset_service),
    landing: bool = False,
):
    """Return a paginated list of mapsets; visibility is resolved by the service from the caller's payload and the ``landing`` flag."""
    rows, total = await service.find_all(
        user,
        params.filter,
        params.sort,
        params.search,
        params.group_by,
        params.limit,
        params.offset,
        landing,
    )

    return PaginatedResponse(
        items=[MapsetSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/mapsets/organization", response_model=PaginatedResponse[MapsetByOrganizationSchema])
async def get_mapsets_organization(
    params: CommonParams = Depends(),
    user: UserSchema = Depends(get_payload),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """Return mapsets grouped per organization, paginated (``params.group_by`` is not used by this endpoint)."""
    rows, total = await service.find_all_group_by_organization(
        user, params.filter, params.sort, params.search, params.limit, params.offset
    )
    return PaginatedResponse(
        items=[MapsetByOrganizationSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/mapsets/{id}", response_model=MapsetSchema)
async def get_mapset(
    id: UUID7Field,
    user: UserSchema = Depends(get_payload),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """Return a single mapset by id; the caller's payload is forwarded so the service can apply visibility rules."""
    mapset = await service.find_by_id(id, user=user)
    return mapset
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/mapsets", response_model=MapsetSchema, status_code=status.HTTP_201_CREATED)
async def create_mapset(
    data: MapsetCreateSchema,
    user: UserSchema = Depends(get_current_active_user),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """Create a mapset on behalf of the authenticated user."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    mapset = await service.create(user, data.model_dump())
    return mapset
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/mapsets/color_scale", status_code=status.HTTP_200_OK)
async def create_color_scale(
    source_url: str = Body(..., embed=True),
    color_range: list[str] = Body(None, embed=True),
    boundary_file_id: UUID7Field = Body(None, embed=True),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """
    Generate a color scale for the data behind ``source_url``.

    ``color_range`` and ``boundary_file_id`` are optional and passed through
    to the service. NOTE(review): this route has no auth dependency — confirm
    it is meant to be public.
    """
    result, rangelist = await service.generate_colorscale(source_url, color_range, boundary_file_id)
    return {"data": result, "rangelist": rangelist}
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/mapsets/activation", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def update_mapset_activation(
    ids: List[UUID7Field] = Body(...),
    is_active: bool = Body(...),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """Bulk-set the active flag for the given mapset ids; returns 204."""
    await service.bulk_update_activation(ids, is_active)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/mapsets/{id}", response_model=MapsetSchema, dependencies=[Depends(get_payload)])
async def update_mapset(
    id: UUID7Field,
    data: MapsetUpdateSchema,
    user: UserSchema = Depends(get_current_active_user),
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """Partially update a mapset as the authenticated user; unset fields are untouched."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    mapset = await service.update(id, user, data.model_dump(exclude_unset=True))
    return mapset
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/mapsets/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_mapset(id: UUID7Field, service: MapsetService = Depends(Factory().get_mapset_service)):
    """Delete a mapset by id; returns 204 with no body."""
    await service.delete(id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/mapsets/{id}/download", status_code=status.HTTP_204_NO_CONTENT)
async def increment_download_mapset(
    id: UUID7Field,
    service: MapsetService = Depends(Factory().get_mapset_service),
):
    """
    Increment download_count for the given mapset.

    No file is downloaded; this only updates the counter.
    NOTE(review): no auth dependency — presumably so anonymous downloads
    are counted; confirm.
    """
    await service.increment_download_count(id)
|
||||||
75
app/api/v1/routes/news_route.py
Normal file
75
app/api/v1/routes/news_route.py
Normal file
|
|
@ -0,0 +1,75 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.news_schema import NewsCreateSchema, NewsSchema, NewsUpdateSchema
|
||||||
|
from app.services import NewsService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/news", response_model=PaginatedResponse[NewsSchema])
async def get_newss(params: CommonParams = Depends(), service: NewsService = Depends(Factory().get_news_service)):
    """Return a paginated list of news items."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )

    return PaginatedResponse(
        items=[NewsSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=(params.offset + params.limit) < total,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/news/{id}", response_model=NewsSchema)
async def get_news(id: UUID7Field, service: NewsService = Depends(Factory().get_news_service)):
    """Return a single news item by id."""
    news = await service.find_by_id(id)
    return news
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/news",
    response_model=NewsSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_news(data: NewsCreateSchema, service: NewsService = Depends(Factory().get_news_service)):
    """Create a news item (authenticated users only)."""
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    news = await service.create(data.model_dump())
    return news
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/news/activation", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def update_news_activation(
    # BUG FIX: this module never imports typing.List, so the original
    # `List[UUID7Field]` raised NameError at import time. The builtin generic
    # `list[...]` needs no import (the codebase already uses 3.10+ syntax).
    ids: list[UUID7Field] = Body(...),
    is_active: bool = Body(...),
    service: NewsService = Depends(Factory().get_news_service),
):
    """Bulk-set the active flag on the given news ids; responds 204 on success."""
    await service.bulk_update_activation(ids, is_active)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/news/{id}", response_model=NewsSchema, dependencies=[Depends(get_current_active_user)])
async def update_news(
    id: UUID7Field,
    data: NewsUpdateSchema,
    service: NewsService = Depends(Factory().get_news_service),
):
    """Partially update a news item; only client-provided fields are applied."""
    # Pydantic v2: model_dump() replaces the deprecated dict(); exclude_unset
    # keeps this a true PATCH (untouched fields are not overwritten).
    news = await service.update(id, data.model_dump(exclude_unset=True))
    return news
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/news/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)])
async def delete_news(id: UUID7Field, service: NewsService = Depends(Factory().get_news_service)):
    """Delete a news item by id (authenticated users only); responds 204."""
    await service.delete(id)
|
||||||
84
app/api/v1/routes/organization_route.py
Normal file
84
app/api/v1/routes/organization_route.py
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user, get_payload
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.organization_schema import (
|
||||||
|
OrganizationCreateSchema,
|
||||||
|
OrganizationSchema,
|
||||||
|
OrganizationUpdateSchema,
|
||||||
|
)
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
from app.services import OrganizationService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/organizations", response_model=PaginatedResponse[OrganizationSchema])
async def get_organizations(
    params: CommonParams = Depends(),
    user: UserSchema = Depends(get_payload),
    service: OrganizationService = Depends(Factory().get_organization_service),
    landing: bool = False,
):
    """List organizations scoped to *user*; the `landing` flag is forwarded to
    the service (presumably selecting the public landing view — confirm)."""
    rows, total = await service.find_all(
        user, params.filter, params.sort, params.search, params.group_by, params.limit, params.offset, landing
    )
    return PaginatedResponse(
        items=[OrganizationSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=total > (params.offset + params.limit),
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/organizations/{id}", response_model=OrganizationSchema)
async def get_organization(
    id: UUID7Field,
    user: UserSchema = Depends(get_payload),
    service: OrganizationService = Depends(Factory().get_organization_service),
):
    """Fetch one organization by id, scoped to the requesting user."""
    return await service.get_organizations_by_id(user, id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/organizations",
    response_model=OrganizationSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_organization(
    data: OrganizationCreateSchema, service: OrganizationService = Depends(Factory().get_organization_service)
):
    """Create an organization (authenticated users only); returns it with 201."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    organization = await service.create(data.model_dump())
    return organization
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch(
    "/organizations/{id}", response_model=OrganizationSchema, dependencies=[Depends(get_current_active_user)]
)
async def update_organization(
    id: UUID7Field,
    data: OrganizationUpdateSchema,
    service: OrganizationService = Depends(Factory().get_organization_service),
):
    """Partially update an organization; only client-provided fields are applied."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    organization = await service.update(id, data.model_dump(exclude_unset=True))
    return organization
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/organizations/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_organization(
    id: UUID7Field, service: OrganizationService = Depends(Factory().get_organization_service)
):
    """Delete an organization by id (authenticated users only); responds 204."""
    await service.delete(id)
|
||||||
68
app/api/v1/routes/regional_route.py
Normal file
68
app/api/v1/routes/regional_route.py
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas import RegionalCreateSchema, RegionalSchema, RegionalUpdateSchema
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.services import RegionalService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/regionals", response_model=PaginatedResponse[RegionalSchema])
async def get_regionals(
    params: CommonParams = Depends(), service: RegionalService = Depends(Factory().get_regional_service)
):
    """List regionals with filter/sort/search/grouping and pagination metadata."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )
    return PaginatedResponse(
        items=[RegionalSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=total > (params.offset + params.limit),
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/regionals/{id}", response_model=RegionalSchema)
async def get_regional(id: UUID7Field, service: RegionalService = Depends(Factory().get_regional_service)):
    """Fetch a single regional record by its UUIDv7 id."""
    return await service.find_by_id(id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/regionals",
    response_model=RegionalSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_regional(
    data: RegionalCreateSchema, service: RegionalService = Depends(Factory().get_regional_service)
):
    """Create a regional record (authenticated users only); returns it with 201."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    regional = await service.create(data.model_dump())
    return regional
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/regionals/{id}", response_model=RegionalSchema, dependencies=[Depends(get_current_active_user)])
async def update_regional(
    id: UUID7Field,
    data: RegionalUpdateSchema,
    service: RegionalService = Depends(Factory().get_regional_service),
):
    """Partially update a regional record; only client-provided fields are applied."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    regional = await service.update(id, data.model_dump(exclude_unset=True))
    return regional
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/regionals/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)]
)
async def delete_regional(id: UUID7Field, service: RegionalService = Depends(Factory().get_regional_service)):
    """Delete a regional record by id (authenticated users only); responds 204."""
    await service.delete(id)
|
||||||
62
app/api/v1/routes/role_route.py
Normal file
62
app/api/v1/routes/role_route.py
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.role_schema import RoleCreateSchema, RoleSchema, RoleUpdateSchema
|
||||||
|
from app.services import RoleService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/roles", response_model=PaginatedResponse[RoleSchema])
async def get_roles(params: CommonParams = Depends(), service: RoleService = Depends(Factory().get_role_service)):
    """List roles with filter/sort/search/grouping and pagination metadata."""
    rows, total = await service.find_all(
        params.filter, params.sort, params.search, params.group_by, params.limit, params.offset
    )
    return PaginatedResponse(
        items=[RoleSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=total > (params.offset + params.limit),
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/roles/{id}", response_model=RoleSchema)
async def get_role(id: UUID7Field, service: RoleService = Depends(Factory().get_role_service)):
    """Fetch a single role by its UUIDv7 id."""
    return await service.find_by_id(id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/roles",
    response_model=RoleSchema,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(get_current_active_user)],
)
async def create_role(data: RoleCreateSchema, service: RoleService = Depends(Factory().get_role_service)):
    """Create a role (authenticated users only); returns it with 201."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    role = await service.create(data.model_dump())
    return role
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/roles/{id}", response_model=RoleSchema, dependencies=[Depends(get_current_active_user)])
async def update_role(
    id: UUID7Field,
    data: RoleUpdateSchema,
    service: RoleService = Depends(Factory().get_role_service),
):
    """Partially update a role; only client-provided fields are applied."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    role = await service.update(id, data.model_dump(exclude_unset=True))
    return role
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/roles/{id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_active_user)])
async def delete_role(id: UUID7Field, service: RoleService = Depends(Factory().get_role_service)):
    """Delete a role by id (authenticated users only); responds 204."""
    await service.delete(id)
|
||||||
94
app/api/v1/routes/user_route.py
Normal file
94
app/api/v1/routes/user_route.py
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, Depends, status
|
||||||
|
|
||||||
|
from app.api.dependencies.auth import get_current_active_user
|
||||||
|
from app.api.dependencies.factory import Factory
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.params import CommonParams
|
||||||
|
from app.schemas.base import PaginatedResponse
|
||||||
|
from app.schemas.user_schema import UserCreateSchema, UserSchema, UserUpdateSchema
|
||||||
|
from app.services import UserService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/users", response_model=PaginatedResponse[UserSchema])
async def get_users(
    params: CommonParams = Depends(),
    user_active: UserSchema = Depends(get_current_active_user),
    service: UserService = Depends(Factory().get_user_service),
):
    """List users visible to the authenticated user, with pagination metadata."""
    rows, total = await service.find_all(
        filters=params.filter,
        sort=params.sort,
        search=params.search,
        group_by=params.group_by,
        limit=params.limit,
        offset=params.offset,
        user=user_active,
    )
    return PaginatedResponse(
        items=[UserSchema.model_validate(row) for row in rows],
        total=total,
        limit=params.limit,
        offset=params.offset,
        has_more=total > (params.offset + params.limit),
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/users/{id}", response_model=UserSchema)
async def get_user(
    id: UUID7Field,
    user: UserSchema = Depends(get_current_active_user),
    service: UserService = Depends(Factory().get_user_service),
):
    """Fetch a user by id, scoped to the requesting authenticated user."""
    return await service.find_by_id(id, user)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/users", response_model=UserSchema, status_code=status.HTTP_201_CREATED)
async def create_user(
    data: UserCreateSchema,
    user: UserSchema = Depends(get_current_active_user),
    service: UserService = Depends(Factory().get_user_service),
):
    """Create a user on behalf of the authenticated *user*; returns it with 201."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    created = await service.create(data.model_dump(), user)
    return created
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/users/activation", status_code=status.HTTP_204_NO_CONTENT)
async def update_user_activation(
    ids: List[UUID7Field] = Body(...),
    is_active: bool = Body(...),
    user: UserSchema = Depends(get_current_active_user),
    service: UserService = Depends(Factory().get_user_service),
):
    """Bulk-set the active flag for the given user ids, acting as *user*; responds 204."""
    await service.bulk_update_activation(ids, is_active, user)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/users/{id}", response_model=UserSchema)
async def update_user(
    id: UUID7Field,
    data: UserUpdateSchema,
    user: UserSchema = Depends(get_current_active_user),
    service: UserService = Depends(Factory().get_user_service),
):
    """Partially update a user; only client-provided fields are applied."""
    # Pydantic v2: model_dump() replaces the deprecated dict().
    updated = await service.update(id, data.model_dump(exclude_unset=True), user)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/users/{id}",
    status_code=status.HTTP_204_NO_CONTENT,
    dependencies=[Depends(get_current_active_user)],
)
async def delete_user(id: UUID7Field, service: UserService = Depends(Factory().get_user_service)):
    """Delete a user by id (authenticated users only); responds 204."""
    await service.delete(id)
|
||||||
0
app/core/__init__.py
Normal file
0
app/core/__init__.py
Normal file
88
app/core/config.py
Normal file
88
app/core/config.py
Normal file
|
|
@ -0,0 +1,88 @@
|
||||||
|
from functools import lru_cache
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
from app.utils.system import get_optimal_workers
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Application configuration loaded from environment variables / .env file."""

    # Application settings
    PROJECT_NAME: str = Field(default="Satu Peta")
    VERSION: str = Field(default="0.1.0")
    DESCRIPTION: str = Field(default="Satu Peta API")

    # Server settings (uvicorn tuning knobs)
    DEBUG: bool = Field(default=False)
    HOST: str = Field(default="127.0.0.1")
    PORT: int = Field(default=8001)
    WORKERS: int = Field(default=get_optimal_workers())
    LOG_LEVEL: str = Field(default="info")
    LOOP: str = Field(default="uvloop")
    HTTP: str = Field(default="httptools")
    LIMIT_CONCURRENCY: int = Field(default=100)
    BACKLOG: int = Field(default=2048)
    LIMIT_MAX_REQUESTS: int | None = Field(default=None)
    TIMEOUT_KEEP_ALIVE: int = Field(default=5)
    H11_MAX_INCOMPLETE_EVENT_SIZE: int = Field(default=16 * 1024)
    # BUG FIX: the original default was f"{PROJECT_NAME}/{VERSION}", but inside
    # the class body those names are bound to FieldInfo objects, so the header
    # rendered as "FieldInfo(...)/FieldInfo(...)". Use the literal value; it can
    # still be overridden via the SERVER_HEADER environment variable.
    SERVER_HEADER: str = Field(default="Satu Peta/0.1.0")
    FORWARDED_ALLOW_IPS: str = Field(default="*")
    DATE_HEADER: bool = Field(default=True)

    @property
    def ACCESS_LOG(self) -> bool:
        # Access logging simply follows the debug flag.
        return self.DEBUG

    # Database settings (required; no default)
    DATABASE_URL: str

    # Security settings
    SECRET_KEY: str
    ALGORITHM: str = Field(default="HS256")
    ACCESS_TOKEN_EXPIRE_MINUTES: int = Field(default=30)
    REFRESH_TOKEN_EXPIRE_DAYS: int = Field(default=7)

    # CORS settings
    ALLOWED_ORIGINS: List[str] = Field(default=["*"])

    # S3/MinIO settings
    MINIO_ENDPOINT_URL: str = Field(default="http://localhost:9000")
    MINIO_ROOT_USER: str
    MINIO_ROOT_PASSWORD: str
    MINIO_SECURE: Optional[bool] = False
    MINIO_BUCKET_NAME: Optional[str] = Field(default="satu-peta")
    MINIO_REGION: Optional[str] = Field(default=None)

    MAX_UPLOAD_SIZE: int = 100 * 1024 * 1024  # 100MB default limit
    ALLOWED_EXTENSIONS: List[str] = [
        "jpg",
        "jpeg",
        "png",
        "pdf",
        "doc",
        "docx",
        "xls",
        "xlsx",
        "txt",
        "csv",
        "zip",
        "rar",
        "json",
    ]

    TIMEZONE: str = Field(default="Asia/Jakarta")

    # GeoNetwork settings
    GEONETWORK_API_URL: str = Field(
        default="https://geonetwork.jatimprov.go.id/geonetwork/srv/api/search/records/_search"
    )

    # Settings config: case-sensitive env names, unknown keys tolerated.
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=True, extra="allow")
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache
def get_settings() -> Settings:
    """Return the application settings, constructed once per process (cached)."""
    return Settings()


# Module-level singleton used throughout the app.
settings = get_settings()
|
||||||
39
app/core/data_types.py
Normal file
39
app/core/data_types.py
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from pydantic import GetCoreSchemaHandler
|
||||||
|
from pydantic_core import CoreSchema, PydanticCustomError
|
||||||
|
from pydantic_core.core_schema import (
|
||||||
|
is_instance_schema,
|
||||||
|
json_or_python_schema,
|
||||||
|
no_info_plain_validator_function,
|
||||||
|
plain_serializer_function_ser_schema,
|
||||||
|
str_schema,
|
||||||
|
union_schema,
|
||||||
|
)
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
|
||||||
|
class UUID7Field(UUID):
    """Pydantic-compatible UUID field backed by ``uuid6.UUID``.

    In Python mode it accepts an existing UUID instance or anything whose
    ``str()`` parses as a UUID; values serialise back to their string form.
    """

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        handler: GetCoreSchemaHandler,
    ) -> CoreSchema:
        # JSON input validates as a plain string; Python input is either already
        # an instance of this class or coerced through cls.validate.
        return json_or_python_schema(
            json_schema=str_schema(),
            python_schema=union_schema([is_instance_schema(cls), no_info_plain_validator_function(cls.validate)]),
            serialization=plain_serializer_function_ser_schema(
                lambda x: str(x),
                return_schema=str_schema(),
            ),
        )

    @classmethod
    def validate(cls, v):
        """Coerce *v* to a UUID, raising a pydantic-friendly error on bad input."""
        # Any UUID instance passes through unchanged (version is not enforced
        # here despite the class name — NOTE(review): confirm this is intended).
        if isinstance(v, UUID):
            return v
        try:
            return UUID(str(v))
        except ValueError as e:
            raise PydanticCustomError("uuid_parsing", "Invalid UUID format") from e
|
||||||
24
app/core/database.py
Normal file
24
app/core/database.py
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
"""Standalone helper that creates all ORM tables; intended to be run as a script."""

import asyncio
import os
import sys

from sqlalchemy.ext.asyncio import create_async_engine

# Make the project root importable when this file is executed directly
# (python app/core/database.py) rather than as part of the package.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))

# FIX: os and sys were imported a second time here; the duplicates are removed.
from app.core.config import settings
from app.models import Base

# echo=True logs every SQL statement — acceptable for this one-off schema script.
engine = create_async_engine(settings.DATABASE_URL, echo=True)


async def create_tables():
    """Create every table declared on the ORM metadata (no-op for existing tables)."""
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


if __name__ == "__main__":
    asyncio.run(create_tables())
|
||||||
63
app/core/exceptions.py
Normal file
63
app/core/exceptions.py
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
from http import HTTPStatus
|
||||||
|
from typing import Any, Dict, Optional, Type
|
||||||
|
|
||||||
|
from fastapi import status
|
||||||
|
|
||||||
|
|
||||||
|
class APIException(Exception):
    """Base class for application errors that map to HTTP responses.

    Subclasses override ``status_code`` and ``default_message``; instances may
    carry extra ``detail`` data and response ``headers``.
    """

    status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
    default_message: str = "Internal server error"

    def __init__(
        self, message: Optional[str] = None, detail: Optional[Any] = None, headers: Optional[Dict[str, str]] = None
    ):
        # Falsy messages/headers fall back to the class default / empty dict.
        self.detail = detail
        self.headers = headers or {}
        self.message = message or self.default_message
        super().__init__(self.message)
|
||||||
|
|
||||||
|
|
||||||
|
def create_exception(name: str, status_code: int, default_message: str) -> Type[APIException]:
    """Dynamically build an APIException subclass with the given status and message."""
    attrs = {"status_code": status_code, "default_message": default_message}
    return type(name, (APIException,), attrs)
|
||||||
|
|
||||||
|
|
||||||
|
def prepare_error_response(message: str, detail: Any = None, error_type: Optional[str] = None) -> Dict[str, Any]:
    """Build a JSON-serialisable error payload.

    A dict *detail* is merged into the payload (its keys may override
    ``detail``); any other non-None *detail* is stored under ``additional_info``.
    A truthy *error_type* is added under ``error_type``.
    """
    payload: Dict[str, Any] = {"detail": message}

    if isinstance(detail, dict):
        payload.update(detail)
    elif detail is not None:
        payload["additional_info"] = detail

    if error_type:
        payload["error_type"] = error_type

    return payload
|
||||||
|
|
||||||
|
|
||||||
|
# Concrete exception classes produced via create_exception. Each carries its
# HTTP status code and uses the generic reason description from http.HTTPStatus
# as its default message.
BadRequestException = create_exception(
    "BadRequestException", status.HTTP_400_BAD_REQUEST, HTTPStatus.BAD_REQUEST.description
)

NotFoundException = create_exception("NotFoundException", status.HTTP_404_NOT_FOUND, HTTPStatus.NOT_FOUND.description)

ForbiddenException = create_exception(
    "ForbiddenException", status.HTTP_403_FORBIDDEN, HTTPStatus.FORBIDDEN.description
)

UnauthorizedException = create_exception(
    "UnauthorizedException", status.HTTP_401_UNAUTHORIZED, HTTPStatus.UNAUTHORIZED.description
)

UnprocessableEntity = create_exception(
    "UnprocessableEntity", status.HTTP_422_UNPROCESSABLE_ENTITY, HTTPStatus.UNPROCESSABLE_ENTITY.description
)

# Both of the following also map to 422; they exist so call sites can signal
# the distinct failure cause by type.
DuplicateValueException = create_exception(
    "DuplicateValueException", status.HTTP_422_UNPROCESSABLE_ENTITY, HTTPStatus.UNPROCESSABLE_ENTITY.description
)
InvalidInputException = create_exception(
    "InvalidInputException", status.HTTP_422_UNPROCESSABLE_ENTITY, HTTPStatus.UNPROCESSABLE_ENTITY.description
)
|
||||||
193
app/core/minio_client.py
Normal file
193
app/core/minio_client.py
Normal file
|
|
@ -0,0 +1,193 @@
|
||||||
|
from typing import Any, BinaryIO, Dict, List, Optional, Tuple
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from fastapi import HTTPException, status
|
||||||
|
from miniopy_async import Minio
|
||||||
|
from miniopy_async.error import S3Error
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
|
||||||
|
class MinioClient:
    """
    Client class for asynchronous interaction with MinIO object storage.
    """

    def __init__(self):
        # All connection parameters come from application settings.
        self.client = Minio(
            endpoint=settings.MINIO_ENDPOINT_URL,
            access_key=settings.MINIO_ROOT_USER,
            secret_key=settings.MINIO_ROOT_PASSWORD,
            secure=settings.MINIO_SECURE,
            region=settings.MINIO_REGION,
        )
        self.bucket_name = settings.MINIO_BUCKET_NAME

    async def init_bucket(self) -> None:
        """
        Create the configured bucket if it does not exist yet.
        """
        try:
            if not await self.client.bucket_exists(self.bucket_name):
                await self.client.make_bucket(self.bucket_name)
                # Set a bucket policy here if public access is required
                # policy = {...} # Define your policy if needed
                # await self.client.set_bucket_policy(self.bucket_name, json.dumps(policy))
        except S3Error as err:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Error initializing MinIO bucket: {str(err)}",
            )

    async def upload_file(
        self,
        file_data: BinaryIO,
        object_name: str,
        content_type: str,
        content_length: int,
        metadata: Optional[Dict[str, str]] = None,
    ) -> str:
        """
        Upload a file to MinIO.

        Args:
            file_data: File-like object to upload
            object_name: Object name in MinIO
            content_type: Content type of the file
            content_length: Payload size in bytes
            metadata: Additional metadata for the object

        Returns:
            URL of the uploaded object
        """
        try:
            # Ensure the bucket exists before every upload.
            await self.init_bucket()

            # Upload file
            await self.client.put_object(
                bucket_name=self.bucket_name,
                object_name=object_name,
                data=file_data,
                length=content_length,
                content_type=content_type,
                metadata=metadata,
            )

            # Generate URL
            url = await self.get_file_url(object_name)
            return url

        except S3Error as err:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error uploading file to MinIO: {str(err)}"
            )

    async def get_file(self, object_name: str) -> Tuple[BinaryIO, Dict[str, Any]]:
        """
        Fetch a file from MinIO.

        Args:
            object_name: Object name in MinIO

        Returns:
            Tuple of (file data, object info)

        NOTE(review): the aiohttp.ClientSession created here is never closed in
        this method and only the response object is returned — confirm that the
        caller takes ownership and closes the session once the stream is
        consumed, otherwise this leaks connections.
        """
        try:
            stat = await self.client.stat_object(bucket_name=self.bucket_name, object_name=object_name)

            session = aiohttp.ClientSession()
            response = await self.client.get_object(
                bucket_name=self.bucket_name, object_name=object_name, session=session
            )

            return response, stat.__dict__

        except S3Error as err:
            # Map a missing key to 404; everything else is a server error.
            if err.code == "NoSuchKey":
                raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Error retrieving file from MinIO: {str(err)}",
            )

    async def delete_file(self, object_name: str) -> bool:
        """
        Delete a file from MinIO.

        Args:
            object_name: Object name in MinIO

        Returns:
            Boolean indicating whether the operation succeeded
        """
        try:
            await self.client.remove_object(self.bucket_name, object_name)
            return True
        except S3Error as err:
            # A missing key is reported as False rather than an error.
            if err.code == "NoSuchKey":
                return False
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error deleting file from MinIO: {str(err)}"
            )

    async def get_file_url(self, object_name: str) -> str:
        """
        Build a URL for accessing the object.

        Args:
            object_name: Object name in MinIO

        Returns:
            URL for accessing the file
        """
        try:
            # For public access
            if settings.MINIO_SECURE:
                protocol = "https"
            else:
                protocol = "http"

            # The endpoint setting may or may not include a scheme; netloc is
            # empty when it doesn't, so fall back to the raw value.
            parsed_endpoint = urlparse(settings.MINIO_ENDPOINT_URL)
            host = parsed_endpoint.netloc or settings.MINIO_ENDPOINT_URL

            return f"{protocol}://{host}/{self.bucket_name}/{object_name}"

            # For presigned URL (time-limited access):
            # return await self.client.presigned_get_object(
            #     bucket_name=self.bucket_name,
            #     object_name=object_name,
            #     expires=timedelta(hours=1)
            # )

        except S3Error as err:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error generating URL: {str(err)}"
            )

    async def list_files(self, prefix: str = "", recursive: bool = True) -> List[Dict[str, Any]]:
        """
        List all objects in the bucket under the given prefix.

        Args:
            prefix: Object-name prefix to match
            recursive: If True, also descends into sub-directories

        Returns:
            List of object descriptors (name, size, last_modified, etag)
        """
        try:
            objects = []
            async for obj in self.client.list_objects(self.bucket_name, prefix=prefix, recursive=recursive):
                objects.append(
                    {
                        "name": obj.object_name,
                        "size": obj.size,
                        "last_modified": obj.last_modified,
                        "etag": obj.etag,
                    }
                )
            return objects
        except S3Error as err:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error listing files: {str(err)}"
            )
|
||||||
36
app/core/params.py
Normal file
36
app/core/params.py
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
import json
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import Query
|
||||||
|
|
||||||
|
|
||||||
|
class CommonParams:
    """Common list-endpoint query parameters.

    ``filter`` and ``sort`` arrive as JSON-encoded strings; values that fail to
    parse are kept as the raw string, and missing values become empty lists.
    """

    def __init__(
        self,
        filter: Optional[str] = Query(default=None),
        sort: Optional[str] = Query(default=None),
        search: str = Query(default=""),
        group_by: Optional[str] = Query(default=None),
        limit: int = Query(default=100, ge=1),
        offset: int = Query(default=0, ge=0),
    ):
        self.filter = self._decode(filter)
        self.sort = self._decode(sort)
        self.search = search
        self.group_by = group_by
        self.limit = limit
        self.offset = offset

    @staticmethod
    def _decode(raw):
        # Best-effort JSON decode: absent -> [], unparseable -> raw string.
        if not raw:
            return []
        try:
            return json.loads(raw)
        except Exception:
            return raw
||||||
16
app/core/responses.py
Normal file
16
app/core/responses.py
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
from typing import Any, override
|
||||||
|
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
|
from app.utils.helpers import orjson_dumps
|
||||||
|
|
||||||
|
|
||||||
|
class ORJSONResponse(JSONResponse):
    """Custom JSONResponse that serialises with orjson."""

    media_type = "application/json"

    @override
    def render(self, content: Any) -> bytes:
        """Render *content* to bytes using orjson (app.utils.helpers.orjson_dumps)."""
        return orjson_dumps(content)
|
||||||
80
app/core/security.py
Normal file
80
app/core/security.py
Normal file
|
|
@ -0,0 +1,80 @@
|
||||||
|
# app/core/security.py
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Any, Dict, Optional, Union
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from jose import ExpiredSignatureError, JWTError, jwt
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
from pytz import timezone
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
# Password hashing
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
|
||||||
|
|
||||||
|
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash.

    Returns True when the password matches, False otherwise.
    """
    return pwd_context.verify(plain_password, hashed_password)
|
||||||
|
|
||||||
|
|
||||||
|
def get_password_hash(password: str) -> str:
    """Hash a plaintext password with bcrypt (scheme configured on pwd_context)."""
    return pwd_context.hash(password)
|
||||||
|
|
||||||
|
|
||||||
|
def create_token(
    subject: Union[str, Any], expires_delta: Optional[timedelta] = None, token_type: str = "access"
) -> str:
    """Create a signed JWT.

    Args:
        subject: Value stored (stringified) in the ``sub`` claim.
        expires_delta: Explicit lifetime; when omitted, a per-type default is used.
        token_type: "access", "refresh", or anything else (falls back to 15 minutes).

    Returns:
        The encoded JWT string.
    """
    # Read the clock once so ``iat`` and the base of ``exp`` are consistent
    # (the original recomputed now() separately for each claim).
    now = datetime.now(timezone(settings.TIMEZONE))

    if expires_delta is None:
        if token_type == "access":
            expires_delta = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
        elif token_type == "refresh":
            expires_delta = timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
        else:
            # Unknown token types get a short, conservative lifetime.
            expires_delta = timedelta(minutes=15)

    to_encode = {
        "exp": now + expires_delta,
        "iat": now,
        "sub": str(subject),
        "type": token_type,
    }

    return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
||||||
|
|
||||||
|
|
||||||
|
def create_access_token(subject: Union[str, Any]) -> str:
    """Create an access token (lifetime from ACCESS_TOKEN_EXPIRE_MINUTES)."""
    return create_token(subject, token_type="access")
|
||||||
|
|
||||||
|
|
||||||
|
def create_refresh_token(subject: Union[str, Any]) -> str:
    """Create a refresh token (lifetime from REFRESH_TOKEN_EXPIRE_DAYS)."""
    return create_token(subject, token_type="refresh")
|
||||||
|
|
||||||
|
|
||||||
|
def decode_token(token: str) -> Dict[str, Any]:
    """Decode and validate a JWT; return its claims.

    Raises:
        HTTPException: 401 with "Token expired" when the token's exp has passed,
            or "Invalid token" for any other signature/claim failure.
    """
    try:
        return jwt.decode(
            token,
            settings.SECRET_KEY,
            algorithms=[settings.ALGORITHM],
            options={
                "verify_signature": True,
                "verify_exp": True,
                "verify_iat": True,
            },
        )
    except ExpiredSignatureError as err:
        # Distinguish expiry so clients know to refresh instead of re-authenticating.
        raise HTTPException(status_code=401, detail="Token expired") from err
    except JWTError as err:
        raise HTTPException(status_code=401, detail="Invalid token") from err
|
||||||
117
app/main.py
Normal file
117
app/main.py
Normal file
|
|
@ -0,0 +1,117 @@
|
||||||
|
# local run
|
||||||
|
# pyenv local 3.13.0
|
||||||
|
# poetry env use python
|
||||||
|
# poetry install
|
||||||
|
# poetry install --no-root
|
||||||
|
|
||||||
|
# poetry run uvicorn app.main:app --reload
|
||||||
|
|
||||||
|
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
|
||||||
|
from asyncpg.exceptions import ForeignKeyViolationError
|
||||||
|
from brotli_asgi import BrotliMiddleware
|
||||||
|
from fastapi import FastAPI, Request, status
|
||||||
|
from fastapi.exceptions import HTTPException, RequestValidationError
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.responses import JSONResponse, ORJSONResponse
|
||||||
|
from fastapi_async_sqlalchemy import SQLAlchemyMiddleware
|
||||||
|
from sqlalchemy.exc import IntegrityError
|
||||||
|
|
||||||
|
from app.api.v1 import router as api_router
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.exceptions import APIException, prepare_error_response
|
||||||
|
from app.utils.system import optimize_system
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: run one-time system tuning before serving requests."""
    await optimize_system()
    yield


app = FastAPI(
    title=settings.PROJECT_NAME,
    version=settings.VERSION,
    description=settings.DESCRIPTION,
    default_response_class=ORJSONResponse,
    lifespan=lifespan,
    root_path="/api",
    docs_url="/docs",
    redoc_url="/redoc",
    openapi_url="/openapi.json",
)

# Compress responses larger than 1 KB.
app.add_middleware(
    BrotliMiddleware,
    minimum_size=1000,
)
# Per-request async SQLAlchemy session.
app.add_middleware(
    SQLAlchemyMiddleware,
    db_url=settings.DATABASE_URL,
    engine_args={"echo": settings.DEBUG},
)
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(api_router)


@app.exception_handler(APIException)
async def api_exception_handler(request: Request, exc: APIException):
    """Translate a domain APIException into a JSON error response."""
    response_content = prepare_error_response(message=exc.message)

    return JSONResponse(status_code=exc.status_code, content=response_content, headers=exc.headers)


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    """Render FastAPI/Starlette HTTPExceptions in the project's error envelope."""
    response_content = prepare_error_response(message=str(exc.detail))

    return JSONResponse(status_code=exc.status_code, content=response_content, headers=exc.headers)


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Return 422 with the pydantic validation error list."""
    response_content = prepare_error_response(message=exc.errors())

    return JSONResponse(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, content=response_content)


@app.exception_handler(LookupError)
async def enum_exception_handler(request: Request, exc: LookupError):
    """Map SQLAlchemy enum-coercion LookupErrors to 400; re-raise anything else."""
    error_message = str(exc)

    if "is not among the defined enum values" in error_message:
        response_content = prepare_error_response(
            message="Invalid enum value",
            detail=error_message,
        )
        return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=response_content)

    # Not an enum problem — let the generic Exception handler deal with it.
    raise exc


# FIX: this handler previously reused the name `global_exception_handler`,
# silently shadowing/being shadowed by the catch-all below at module level.
@app.exception_handler(ForeignKeyViolationError)
@app.exception_handler(IntegrityError)
async def integrity_exception_handler(request: Request, exc: Exception):
    """Return 400 for database integrity / foreign-key violations."""
    response_content = prepare_error_response(
        message="Foreign key violation",
        detail=str(exc) if settings.DEBUG else None,
    )

    return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=response_content)


@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Catch-all: return 500, exposing details only in DEBUG mode."""
    response_content = prepare_error_response(
        message="Internal server error", detail=str(exc) if settings.DEBUG else None
    )

    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=response_content)
|
||||||
38
app/models/__init__.py
Normal file
38
app/models/__init__.py
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
from .base import Base
|
||||||
|
from .category_model import CategoryModel
|
||||||
|
from .classification_model import ClassificationModel
|
||||||
|
from .credential_model import CredentialModel
|
||||||
|
from .file_model import FileModel
|
||||||
|
from .map_access_model import MapAccessModel
|
||||||
|
from .map_projection_system_model import MapProjectionSystemModel
|
||||||
|
from .map_source_model import MapSourceModel, SourceUsageModel
|
||||||
|
from .mapset_history_model import MapsetHistoryModel
|
||||||
|
from .mapset_model import MapsetModel
|
||||||
|
from .news_model import NewsModel
|
||||||
|
from .organization_model import OrganizationModel
|
||||||
|
from .refresh_token_model import RefreshTokenModel
|
||||||
|
from .regional_model import RegionalModel
|
||||||
|
from .role_model import RoleModel
|
||||||
|
from .user_model import UserModel
|
||||||
|
from .feedback_model import FeedbackModel
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Base",
|
||||||
|
"OrganizationModel",
|
||||||
|
"RoleModel",
|
||||||
|
"UserModel",
|
||||||
|
"RefreshTokenModel",
|
||||||
|
"NewsModel",
|
||||||
|
"FileModel",
|
||||||
|
"CredentialModel",
|
||||||
|
"MapsetModel",
|
||||||
|
"MapSourceModel",
|
||||||
|
"MapProjectionSystemModel",
|
||||||
|
"MapAccessModel",
|
||||||
|
"MapsetHistoryModel",
|
||||||
|
"CategoryModel",
|
||||||
|
"ClassificationModel",
|
||||||
|
"RegionalModel",
|
||||||
|
"SourceUsageModel",
|
||||||
|
"FeedbackModel",
|
||||||
|
]
|
||||||
12
app/models/base.py
Normal file
12
app/models/base.py
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
from sqlalchemy.orm import declarative_base
|
||||||
|
|
||||||
|
from app.utils.helpers import orm_to_dict
|
||||||
|
|
||||||
|
|
||||||
|
class Base(declarative_base()):
    """Declarative base shared by all ORM models in the app."""

    # Abstract: no table is created for the base class itself.
    __abstract__ = True

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this row to a plain dict via the shared helper."""
        return orm_to_dict(self)
|
||||||
16
app/models/category_model.py
Normal file
16
app/models/category_model.py
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
import uuid6
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, Integer, String, Text, text
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryModel(Base):
    """Mapset category with thumbnail, display order and a denormalized count."""

    __tablename__ = "categories"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String)
    description = Column(Text)
    thumbnail = Column(String)  # presumably an image path/URL — confirm against upload code
    # Denormalized number of mapsets in this category; 0 both client- and server-side.
    count_mapset = Column(Integer, default=0, server_default=text("0"))
    is_active = Column(Boolean, default=True)
    order = Column(Integer, default=0)  # display ordering
|
||||||
14
app/models/classification_model.py
Normal file
14
app/models/classification_model.py
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
import uuid6
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, String
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class ClassificationModel(Base):
    """Data classification level expressed as three boolean flags."""

    __tablename__ = "classifications"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(20), nullable=False)
    # Flags are independent booleans, not mutually exclusive at the DB level.
    is_open = Column(Boolean, default=True)
    is_limited = Column(Boolean, default=False)
    is_secret = Column(Boolean, default=False)
|
||||||
28
app/models/credential_model.py
Normal file
28
app/models/credential_model.py
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from sqlalchemy import JSON, UUID, Boolean, Column, DateTime, ForeignKey, String, Text
|
||||||
|
from sqlalchemy.orm import Mapped
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialModel(Base):
    """Encrypted credential record (data stored ciphered, with its IV)."""

    __tablename__ = "credentials"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String)
    description = Column(Text)
    # Ciphertext plus the IV used to encrypt it; decryption happens in app code.
    encrypted_data = Column(Text, nullable=False)
    encryption_iv = Column(String(255), nullable=False)
    credential_type = Column(String(50), nullable=False)
    credential_metadata: Mapped[dict] = Column(JSON, nullable=True)
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    # datetime.now is passed as a callable: evaluated per-insert/per-update.
    created_at = Column(DateTime(timezone=True), default=datetime.now)
    updated_at = Column(DateTime(timezone=True), onupdate=datetime.now)
    last_used_at = Column(DateTime(timezone=True), nullable=True)
    last_used_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    is_default = Column(Boolean, default=False)
    is_active = Column(Boolean, default=True)
    is_deleted = Column(Boolean, default=False)  # soft delete flag
|
||||||
23
app/models/feedback_model.py
Normal file
23
app/models/feedback_model.py
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text, func
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class FeedbackModel(Base):
    """User feedback survey submission."""

    __tablename__ = "feedback"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # NOTE: column name shadows the imported `datetime` name within the class body;
    # the default is bound to the callable datetime.now before the shadowing occurs.
    datetime = Column(DateTime, default=datetime.now)
    score = Column(Integer)  # satisfaction score — range not enforced here; confirm in schema layer
    tujuan_tercapai = Column(Boolean, default=True)
    tujuan_ditemukan = Column(Boolean, default=True)
    tujuan = Column(String)
    sektor = Column(String)
    email = Column(String)
    saran = Column(Text)
    source_url = Column(String)
    source_access = Column(String)
    notes = Column(Text)
    gender = Column(Integer)  # encoded as int — mapping defined elsewhere; confirm
|
||||||
31
app/models/file_model.py
Normal file
31
app/models/file_model.py
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Column, DateTime, ForeignKey, Integer, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class FileModel(Base):
    """Metadata for an uploaded file stored in object storage."""

    __tablename__ = "files"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid6.uuid7, index=True)
    filename = Column(String(255), nullable=False, index=True)
    object_name = Column(String(512), nullable=False, unique=True)  # unique storage key
    content_type = Column(String(100), nullable=False)
    size = Column(Integer, nullable=False)  # bytes — presumably; confirm against upload code
    description = Column(Text, nullable=True)
    url = Column(String(1024), nullable=False)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    # FIX: defaults must be callables. The original passed datetime.now(...) which is
    # evaluated ONCE at import time, stamping every row with the same timestamp.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    modified_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    uploaded_by = relationship("UserModel", lazy="selectin", uselist=False)
|
||||||
45
app/models/map_access_model.py
Normal file
45
app/models/map_access_model.py
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID as SQLUUID
|
||||||
|
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
|
||||||
|
from sqlalchemy.orm import Mapped
|
||||||
|
from uuid6 import UUID, uuid7
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MapAccessModel(Base):
    """Per-user / per-organization access grant on a mapset."""

    __tablename__ = "mapset_access"

    # NOTE(review): id is stored as String while every other model uses a UUID
    # column type — confirm this is intentional before changing the schema.
    id: Mapped[UUID] = Column(String, primary_key=True, default=uuid7)
    mapset_id: Mapped[UUID] = Column(
        SQLUUID(as_uuid=True), ForeignKey("mapsets.id", ondelete="CASCADE"), nullable=False
    )
    # Exactly one of user_id / organization_id is expected per grant — presumably;
    # not enforced at the DB level.
    user_id: Mapped[Optional[UUID]] = Column(
        SQLUUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=True
    )
    organization_id: Mapped[Optional[UUID]] = Column(
        SQLUUID(as_uuid=True), ForeignKey("organizations.id", ondelete="CASCADE"), nullable=True
    )
    granted_by: Mapped[UUID] = Column(SQLUUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    can_read: Mapped[bool] = Column(Boolean, default=True)
    can_write: Mapped[bool] = Column(Boolean, default=False)
    can_delete: Mapped[bool] = Column(Boolean, default=False)

    # FIX: defaults are callables so each row gets its own timestamp; the original
    # evaluated datetime.now(...) once at import time.
    created_at: Mapped[datetime] = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    updated_at: Mapped[datetime] = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    # expires_at: Mapped[Optional[datetime]] = Column(DateTime(timezone=True), nullable=True) # Optional expiry

    # Relationships (not yet wired up):
    # mapset = relationship("MapsetModel", back_populates="access_grants")
    # user = relationship("UserModel", foreign_keys=[user_id], back_populates="mapset_access")
    # organization = relationship("OrganizationModel", back_populates="mapset_access")
    # grantor = relationship("UserModel", foreign_keys=[granted_by])
|
||||||
11
app/models/map_projection_system_model.py
Normal file
11
app/models/map_projection_system_model.py
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
import uuid6
|
||||||
|
from sqlalchemy import UUID, Column, String
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MapProjectionSystemModel(Base):
    """Lookup table of map projection systems (name only)."""

    __tablename__ = "map_projection_systems"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(50), nullable=False)
|
||||||
56
app/models/map_source_model.py
Normal file
56
app/models/map_source_model.py
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, ForeignKey, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MapSourceModel(Base):
    """External map data source, optionally authenticated via a credential."""

    __tablename__ = "map_sources"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(50), nullable=False)
    description = Column(Text)
    url = Column(Text, nullable=True)
    credential_id = Column(UUID(as_uuid=True), ForeignKey("credentials.id"))
    is_active = Column(Boolean, default=True)
    is_deleted = Column(Boolean, default=False)  # soft delete flag
    # FIX: callable defaults — the original evaluated datetime.now(...) once at
    # import time, so every row would share the same timestamp.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    usages = relationship("SourceUsageModel", back_populates="source", lazy="selectin")
    # Many-to-many to mapsets through the source_usages association table (read-only view).
    mapsets = relationship(
        "MapsetModel",
        secondary="source_usages",
        primaryjoin="MapSourceModel.id == SourceUsageModel.source_id",
        secondaryjoin="SourceUsageModel.mapset_id == MapsetModel.id",
        lazy="selectin",
        viewonly=True,
    )
    credential = relationship("CredentialModel", lazy="selectin", uselist=False)
|
||||||
|
|
||||||
|
|
||||||
|
class SourceUsageModel(Base):
    """Association row linking a map source to a mapset."""

    __tablename__ = "source_usages"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    source_id = Column(UUID(as_uuid=True), ForeignKey("map_sources.id"), nullable=False)
    mapset_id = Column(UUID(as_uuid=True), ForeignKey("mapsets.id"), nullable=False)
    # FIX: callable defaults — the original evaluated datetime.now(...) once at
    # import time, stamping every row with the same timestamp.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    mapset = relationship("MapsetModel", back_populates="source_usages")
    source = relationship("MapSourceModel", back_populates="usages")
|
||||||
27
app/models/mapset_history_model.py
Normal file
27
app/models/mapset_history_model.py
Normal file
|
|
@ -0,0 +1,27 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Column, DateTime, ForeignKey, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetHistoryModel(Base):
    """Model untuk melacak riwayat perubahan pada mapset (change-audit trail)."""

    __tablename__ = "mapset_histories"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    mapset_id = Column(UUID(as_uuid=True), ForeignKey("mapsets.id"), index=True, comment="ID mapset yang dilacak")
    validation_type = Column(String(50), nullable=False, comment="Jenis perubahan pada mapset")
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), comment="ID pengguna yang melakukan perubahan")
    notes = Column(Text, nullable=True, comment="Catatan detail perubahan yang dilakukan")
    # FIX: callable default — the original evaluated datetime.now(...) once at
    # import time, so every history row would share the same timestamp.
    timestamp = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        comment="Waktu perubahan tercatat",
    )

    user = relationship("UserModel", uselist=False, lazy="selectin")
|
||||||
69
app/models/mapset_model.py
Normal file
69
app/models/mapset_model.py
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, ForeignKey, Integer, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetStatus(str, Enum):
    """Validation workflow states for a mapset (string-valued enum)."""

    approved = "approved"
    rejected = "rejected"
    on_verification = "on_verification"
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetModel(Base):
    """Core mapset record: a published map layer with its metadata and lookups."""

    __tablename__ = "mapsets"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    scale = Column(String(29), nullable=False)
    layer_url = Column(Text)
    layer_type = Column(String(20), nullable=True)
    metadata_url = Column(Text)
    # Lookup foreign keys.
    category_id = Column(UUID(as_uuid=True), ForeignKey("categories.id"))
    classification_id = Column(UUID(as_uuid=True), ForeignKey("classifications.id"))
    regional_id = Column(UUID(as_uuid=True), ForeignKey("regionals.id"), nullable=True)
    projection_system_id = Column(UUID(as_uuid=True), ForeignKey("map_projection_systems.id"))
    producer_id = Column(UUID(as_uuid=True), ForeignKey("organizations.id"))
    # Stored as a plain string — presumably one of the MapsetStatus values; confirm.
    status_validation = Column(String(20), nullable=True)
    data_status = Column(String(20), nullable=False)
    data_update_period = Column(String(20), nullable=False)
    data_version = Column(String(20), nullable=False)
    coverage_level = Column(String(20), nullable=True)
    coverage_area = Column(String(20), nullable=True)
    view_count = Column(Integer, default=0)
    download_count = Column(Integer, default=0)
    order = Column(Integer, default=0)  # display ordering
    is_popular = Column(Boolean, default=False)
    is_active = Column(Boolean, default=True)
    is_deleted = Column(Boolean, default=False)  # soft delete flag
    # FIX: callable defaults — the original evaluated datetime.now(...) once at
    # import time, stamping every row with the same timestamp.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"))
    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id"))

    projection_system = relationship("MapProjectionSystemModel", uselist=False, lazy="selectin")
    classification = relationship("ClassificationModel", uselist=False, lazy="selectin")
    category = relationship("CategoryModel", uselist=False, lazy="selectin")
    regional = relationship("RegionalModel", uselist=False, lazy="selectin")
    source_usages = relationship("SourceUsageModel", back_populates="mapset", lazy="selectin")
    # Many-to-many to sources through source_usages (read-only view).
    sources = relationship(
        "MapSourceModel",
        secondary="source_usages",
        primaryjoin="MapsetModel.id == SourceUsageModel.mapset_id",
        secondaryjoin="SourceUsageModel.source_id == MapSourceModel.id",
        lazy="selectin",
        viewonly=True,
    )
    producer = relationship("OrganizationModel", back_populates="mapsets", uselist=False, lazy="joined")
|
||||||
29
app/models/news_model.py
Normal file
29
app/models/news_model.py
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, String, Text, func
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class NewsModel(Base):
    """News/article entry."""

    __tablename__ = "news"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String)
    description = Column(Text)
    thumbnail = Column(String)
    # server_default covers inserts done outside the ORM; the Python-side default
    # must be a callable — the original passed datetime.now(...) which was evaluated
    # once at import time. (FIX)
    created_at = Column(
        DateTime(timezone=True),
        server_default=func.now(),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    updated_at = Column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    is_active = Column(Boolean, default=True)
    is_deleted = Column(Boolean, default=False)  # soft delete flag
|
||||||
42
app/models/organization_model.py
Normal file
42
app/models/organization_model.py
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class OrganizationModel(Base):
    """Producer organization with contact info, members and produced mapsets."""

    __tablename__ = "organizations"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(100), nullable=False)
    description = Column(Text, nullable=True)
    thumbnail = Column(String(255), nullable=True)
    address = Column(String(255), nullable=True)
    phone_number = Column(String(15), nullable=True)
    email = Column(String(100), nullable=True)
    website = Column(String(255), nullable=True)
    is_active = Column(Boolean, default=True, server_default="true")
    is_deleted = Column(Boolean, default=False, server_default="false")  # soft delete flag
    # FIX: callable defaults — the original evaluated datetime.now(...) once at
    # import time, stamping every row with the same timestamp.
    created_at = Column(
        DateTime(timezone=True),
        nullable=False,
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    modified_at = Column(
        DateTime(timezone=True),
        nullable=True,
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    users = relationship("UserModel", lazy="selectin")
    mapsets = relationship("MapsetModel", lazy="selectin")

    # @property
    # def count_mapset(self):
    #     if self.mapsets is None:
    #         return 0
    #     else:
    #         return len(self.mapsets)
|
||||||
23
app/models/refresh_token_model.py
Normal file
23
app/models/refresh_token_model.py
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, ForeignKey, String
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class RefreshTokenModel(Base):
    """Persisted refresh token with expiry and revocation flag."""

    __tablename__ = "refresh_tokens"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False, index=True)
    token = Column(String(255), nullable=False, index=True)
    expires_at = Column(DateTime(timezone=True), nullable=False)
    revoked = Column(Boolean, default=False, server_default="false")
    # FIX: callable default — the original evaluated datetime.now(...) once at
    # import time, so every token row would share the same creation time.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )

    user = relationship("UserModel", lazy="selectin", uselist=False)
|
||||||
26
app/models/regional_model.py
Normal file
26
app/models/regional_model.py
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, String, Text
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class RegionalModel(Base):
    """Administrative region lookup (code + name)."""

    __tablename__ = "regionals"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    code = Column(String(10), nullable=False)
    name = Column(String(50), nullable=False)
    description = Column(Text, nullable=True)
    thumbnail = Column(String(255), nullable=True)
    is_active = Column(Boolean, default=True, nullable=False)
    # FIX: callable defaults — the original evaluated datetime.now(...) once at
    # import time, stamping every row with the same timestamp.
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone(settings.TIMEZONE))
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
|
||||||
32
app/models/role_model.py
Normal file
32
app/models/role_model.py
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class RoleModel(Base):
    """User role (e.g. administrator, data validator)."""

    __tablename__ = "roles"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String(20), nullable=False)
    description = Column(Text, nullable=True)
    is_active = Column(Boolean, default=True, nullable=False)
    # Fix: callables instead of call results so timestamps are computed per
    # row at INSERT/UPDATE time rather than once at import time.
    created_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    # Users holding this role; eager-loaded via a second SELECT ... IN query.
    users = relationship("UserModel", lazy="selectin")

    # Relationships
    # organization = relationship("OrganizationModel", back_populates="members")
    # produced_mapsets = relationship("MapsetModel", back_populates="producer")
    # mapset_access = relationship("MapsetAccessModel", foreign_keys=[MapsetAccessModel.user_id], back_populates="user")
|
||||||
36
app/models/user_model.py
Normal file
36
app/models/user_model.py
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import uuid6
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import UUID, Boolean, Column, DateTime, ForeignKey, String
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
from . import Base
|
||||||
|
|
||||||
|
|
||||||
|
class UserModel(Base):
    """Application user account, linked to one role and one organization."""

    __tablename__ = "users"

    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid6.uuid7)
    name = Column(String, nullable=False)
    email = Column(String, unique=True, nullable=False)
    profile_picture = Column(String, nullable=True)
    username = Column(String, unique=True, nullable=False)
    # NOTE(review): presumably stores a password *hash* — confirm against the
    # auth service before documenting further.
    password = Column(String, nullable=False)
    position = Column(String, nullable=True)
    role_id = Column(UUID(as_uuid=True), ForeignKey("roles.id"), nullable=False)
    employee_id = Column(String, nullable=True)
    organization_id = Column(UUID(as_uuid=True), ForeignKey("organizations.id"), nullable=False)
    is_active = Column(Boolean, default=True)
    is_deleted = Column(Boolean, default=False)  # soft-delete flag
    # Fix: callables instead of call results so timestamps are computed per
    # row at INSERT/UPDATE time rather than once at import time.
    created_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )
    modified_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone(settings.TIMEZONE)),
        onupdate=lambda: datetime.now(timezone(settings.TIMEZONE)),
    )

    organization = relationship("OrganizationModel", back_populates="users", lazy="selectin", uselist=False)
    role = relationship("RoleModel", back_populates="users", lazy="selectin", uselist=False)
|
||||||
39
app/repositories/__init__.py
Normal file
39
app/repositories/__init__.py
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
from .base import BaseRepository
from .category_repository import CategoryRepository
from .classification_repository import ClassificationRepository
from .credential_repository import CredentialRepository
from .feedback_repository import FeedbackRepository
from .file_repository import FileRepository
from .map_access_repository import MapAccessRepository
from .map_projection_system_repository import MapProjectionSystemRepository
from .map_source_repository import MapSourceRepository
from .map_source_usage_repository import SourceUsageRepository
from .mapset_history_repository import MapsetHistoryRepository
from .mapset_repository import MapsetRepository
from .news_repository import NewsRepository
from .organization_repository import OrganizationRepository
from .regional_repository import RegionalRepository
from .role_repository import RoleRepository
from .token_repository import TokenRepository
from .user_repository import UserRepository

# Public API of the repositories package, kept alphabetical for easy diffing.
# (.base must be imported first: sibling modules do `from . import BaseRepository`.)
__all__ = [
    "BaseRepository",
    "CategoryRepository",
    "ClassificationRepository",
    "CredentialRepository",
    "FeedbackRepository",
    "FileRepository",
    "MapAccessRepository",
    "MapProjectionSystemRepository",
    "MapSourceRepository",
    "MapsetHistoryRepository",
    "MapsetRepository",
    "NewsRepository",
    "OrganizationRepository",
    "RegionalRepository",
    "RoleRepository",
    "SourceUsageRepository",
    "TokenRepository",
    "UserRepository",
]
|
||||||
178
app/repositories/base.py
Normal file
178
app/repositories/base.py
Normal file
|
|
@ -0,0 +1,178 @@
|
||||||
|
from typing import Any, Dict, Generic, List, Optional, Tuple, Type, TypeVar
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import String, cast
|
||||||
|
from sqlalchemy import delete as sqlalchemy_delete
|
||||||
|
from sqlalchemy import func, or_, select
|
||||||
|
from sqlalchemy import update as sqlalchemy_update
|
||||||
|
from sqlalchemy.orm import joinedload, selectinload
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
from app.core.database import Base
|
||||||
|
|
||||||
|
ModelType = TypeVar("ModelType", bound=Base)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseRepository(Generic[ModelType]):
    """Optimized base repository with fastapi-async-sqlalchemy.

    Generic async CRUD helpers for a SQLAlchemy model: lookup, paginated
    listing with search/sort, create (single and bulk), update, delete and
    existence checks. Soft delete is honoured automatically on models that
    define an ``is_deleted`` column.
    """

    def __init__(self, model: Type[ModelType]):
        self.model: Type[ModelType] = model

    def build_base_query(self, include_deleted: bool = False):
        """Build the base SELECT, excluding soft-deleted rows when applicable."""
        query = select(self.model)
        if hasattr(self.model, "is_deleted") and not include_deleted:
            query = query.where(self.model.is_deleted.is_(False))
        return query

    def _apply_relationship_options(self, query, relationships: Optional[List[str]]):
        """Attach eager-loading options for each named relationship.

        Collections use selectinload (separate IN query); scalar relations
        use joinedload (single JOIN). Unknown names are silently ignored.
        """
        for rel in relationships or []:
            if hasattr(self.model, rel):
                attr = getattr(self.model, rel)
                if hasattr(attr.property, "collection_class"):
                    query = query.options(selectinload(attr))
                else:
                    query = query.options(joinedload(attr))
        return query

    async def find_by_id(self, id: UUID, relationships: Optional[List[str]] = None) -> Optional[ModelType]:
        """Find a record by primary key with optional eager loading.

        Returns the record or None when not found (or soft-deleted).
        """
        query = self.build_base_query().where(self.model.id == id)
        query = self._apply_relationship_options(query, relationships)
        result = await db.session.execute(query)
        return result.scalar_one_or_none()

    async def find_all(
        self,
        filters: Optional[list] = None,
        sort: Optional[list] = None,
        search: str = "",
        group_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
        relationships: Optional[List[str]] = None,
        searchable_columns: Optional[List[str]] = None,
    ) -> Tuple[List[ModelType], int]:
        """Paginated listing with optional filters, text search and sorting.

        Fix: the original used mutable default arguments (``filters=[]``,
        ``sort=[]``); ``None`` defaults avoid shared state between calls
        while remaining call-compatible.

        Returns:
            ``(records, total)`` where ``total`` ignores limit/offset.
        """
        query = self.build_base_query().filter(*(filters or []))

        # Case-insensitive substring search across the requested columns,
        # falling back to every (non-private) mapped column.
        if search:
            if searchable_columns:
                columns = [col for col in searchable_columns if hasattr(self.model, col)]
            else:
                columns = [col for col in self.model.__table__.columns.keys() if not col.startswith("_")]
            search_conditions = [
                cast(getattr(self.model, col), String).ilike(f"%{search}%") for col in columns
            ]
            if search_conditions:
                query = query.where(or_(*search_conditions))

        if group_by:
            query = query.group_by(getattr(self.model, group_by))

        # Count on the filtered query, before ordering/pagination.
        count_query = select(func.count()).select_from(query.subquery())
        total = await db.session.scalar(count_query)

        if sort:
            query = query.order_by(*sort)
        else:
            query = query.order_by(self.model.id)  # deterministic paging default

        query = self._apply_relationship_options(query, relationships)

        query = query.limit(limit).offset(offset)
        result = await db.session.execute(query)
        records = result.scalars().all()

        return records, total

    async def create(self, data: Dict[str, Any]) -> ModelType:
        """Insert a new record and return it refreshed from the database."""
        new_record = self.model(**data)
        db.session.add(new_record)
        await db.session.commit()
        await db.session.refresh(new_record)
        return new_record

    async def bulk_create(
        self, data: List[Dict[str, Any]], batch_size: int = 1000, return_records: bool = False
    ) -> Optional[List[ModelType]]:
        """Bulk insert in batches.

        With ``return_records=True`` ORM objects are created, refreshed and
        returned (slower); otherwise a fast Core insert is used and ``None``
        is returned.
        """
        if not data:
            return [] if return_records else None

        created_records = []

        for i in range(0, len(data), batch_size):
            batch = data[i : i + batch_size]

            if return_records:
                batch_records = [self.model(**item) for item in batch]
                db.session.add_all(batch_records)
                created_records.extend(batch_records)
            else:
                # Core-level insert: no ORM objects, one statement per batch.
                await db.session.execute(self.model.__table__.insert(), batch)

        await db.session.commit()

        if return_records:
            for record in created_records:
                await db.session.refresh(record)
            return created_records

        return None

    async def update(self, id: UUID, data: Dict[str, Any], refresh: bool = True) -> Optional[ModelType]:
        """Update a record by id.

        ``None`` values are dropped, so this method cannot null out a
        column. Returns the refreshed record (when ``refresh``) or ``None``
        when no row matched.
        """
        clean_data = {k: v for k, v in data.items() if v is not None}

        if not clean_data:
            # Nothing to change; behave like a plain lookup.
            return await self.find_by_id(id) if refresh else None

        query = (
            sqlalchemy_update(self.model)
            .where(self.model.id == id)
            .values(**clean_data)
            .execution_options(synchronize_session="fetch")
        )

        result = await db.session.execute(query)
        await db.session.commit()

        if result.rowcount == 0:
            return None

        return await self.find_by_id(id) if refresh else None

    async def delete(self, id: UUID) -> bool:
        """Hard-delete a record by id; True when a row was removed."""
        query = sqlalchemy_delete(self.model).where(self.model.id == id)
        result = await db.session.execute(query)
        await db.session.commit()
        return result.rowcount > 0

    async def exists(self, id: UUID) -> bool:
        """True when a (non-soft-deleted) record with this id exists."""
        query = select(1).where(self.model.id == id)
        if hasattr(self.model, "is_deleted"):
            query = query.where(self.model.is_deleted.is_(False))

        result = await db.session.scalar(query)
        return result is not None
|
||||||
143
app/repositories/category_repository.py
Normal file
143
app/repositories/category_repository.py
Normal file
|
|
@ -0,0 +1,143 @@
|
||||||
|
from app.models import CategoryModel, MapsetModel, ClassificationModel
|
||||||
|
from sqlalchemy import func, or_, cast, String, select
|
||||||
|
from sqlalchemy.orm import joinedload, selectinload
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from typing import List, Tuple, Optional
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryRepository(BaseRepository[CategoryModel]):
    """Repository for categories, enriching results with a public mapset count.

    A mapset counts toward a category when it is not deleted, is active,
    its validation status is "approved" and its classification is open.
    """

    def __init__(self, model):
        super().__init__(model)

    async def find_by_id(self, id: UUID, relationships: Optional[List[str]] = None) -> Optional[CategoryModel]:
        """Find a category by ID, setting ``count_mapset`` on the result."""
        # Scalar subquery counting publicly visible mapsets in this category.
        # Idiom fix: boolean columns compared with .is_() instead of ==.
        mapset_count_subquery = (
            select(func.count(MapsetModel.id))
            .join(ClassificationModel, MapsetModel.classification_id == ClassificationModel.id)
            .where(MapsetModel.category_id == id)
            .where(MapsetModel.is_deleted.is_(False))
            .where(MapsetModel.is_active.is_(True))
            .where(MapsetModel.status_validation == "approved")
            .where(ClassificationModel.is_open.is_(True))
            .scalar_subquery()
        )

        query = (
            select(self.model, mapset_count_subquery.label('mapset_count'))
            .where(self.model.id == id)
        )

        if hasattr(self.model, "is_deleted"):
            query = query.where(self.model.is_deleted.is_(False))

        # Optional eager loading: collections via selectinload, scalars via joinedload.
        if relationships:
            for rel in relationships:
                if hasattr(self.model, rel):
                    attr = getattr(self.model, rel)
                    if hasattr(attr.property, "collection_class"):
                        query = query.options(selectinload(attr))
                    else:
                        query = query.options(joinedload(attr))

        result = await db.session.execute(query)
        row = result.first()

        if row:
            category = row[0]
            # Defensive: treat a NULL count as zero.
            category.count_mapset = row[1] if row[1] is not None else 0
            return category

        return None

    async def find_all(
        self,
        filters: Optional[list] = None,
        sort: Optional[list] = None,
        search: str = "",
        group_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
        relationships: Optional[List[str]] = None,
        searchable_columns: Optional[List[str]] = None,
    ) -> Tuple[List[CategoryModel], int]:
        """Paginated category listing with per-category mapset counts.

        Fix: mutable default arguments (``filters=[]``, ``sort=[]``)
        replaced with ``None``; call sites are unaffected.
        """
        filters = filters or []

        # Per-category count of publicly visible mapsets, joined below.
        mapset_count_subquery = (
            select(
                MapsetModel.category_id,
                func.count(MapsetModel.id).label('mapset_count')
            )
            .join(ClassificationModel, MapsetModel.classification_id == ClassificationModel.id)
            .where(MapsetModel.is_deleted.is_(False))
            .where(MapsetModel.is_active.is_(True))
            .where(MapsetModel.status_validation == "approved")
            .where(ClassificationModel.is_open.is_(True))
            .group_by(MapsetModel.category_id)
            .subquery()
        )

        # LEFT JOIN so categories with zero mapsets still appear (count 0).
        query = (
            select(self.model, func.coalesce(mapset_count_subquery.c.mapset_count, 0).label('mapset_count'))
            .outerjoin(mapset_count_subquery, self.model.id == mapset_count_subquery.c.category_id)
            .filter(*filters)
        )

        # Case-insensitive substring search over requested (or all) columns.
        if search:
            if searchable_columns:
                search_conditions = [
                    cast(getattr(self.model, col), String).ilike(f"%{search}%")
                    for col in searchable_columns
                    if hasattr(self.model, col)
                ]
            else:
                search_conditions = [
                    cast(getattr(self.model, col), String).ilike(f"%{search}%")
                    for col in self.model.__table__.columns.keys()
                    if not col.startswith("_")
                ]

            if search_conditions:
                query = query.where(or_(*search_conditions))

        if group_by:
            query = query.group_by(getattr(self.model, group_by))

        # Total computed before ordering/pagination.
        count_query = select(func.count()).select_from(query.subquery())
        total = await db.session.scalar(count_query)

        if sort:
            query = query.order_by(*sort)
        else:
            query = query.order_by(self.model.order.asc())  # display order column

        if relationships:
            for rel in relationships:
                if hasattr(self.model, rel):
                    attr = getattr(self.model, rel)
                    if hasattr(attr.property, "collection_class"):
                        query = query.options(selectinload(attr))
                    else:
                        query = query.options(joinedload(attr))

        query = query.limit(limit).offset(offset)
        result = await db.session.execute(query)

        # Attach the computed count to each ORM instance.
        records = []
        for row in result:
            category = row[0]
            category.count_mapset = row[1]
            records.append(category)

        return records, total
|
||||||
|
|
||||||
8
app/repositories/classification_repository.py
Normal file
8
app/repositories/classification_repository.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from app.models import ClassificationModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class ClassificationRepository(BaseRepository[ClassificationModel]):
    """Repository for ClassificationModel; all CRUD inherited from BaseRepository."""

    def __init__(self, model):
        super().__init__(model)
|
||||||
120
app/repositories/credential_repository.py
Normal file
120
app/repositories/credential_repository.py
Normal file
|
|
@ -0,0 +1,120 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import select, update
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.models import CredentialModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialRepository(BaseRepository[CredentialModel]):
    """Repository for stored service credentials ('database', 'api', 'minio', ...)."""

    def __init__(self, model):
        super().__init__(model)

    async def create(self, data: Dict[str, Any]):
        """Create a credential; if flagged default, demote other defaults of its type."""
        credential = await super().create(data)
        if credential.is_default:
            await self.set_default(credential.id, credential.created_by)

        return credential

    async def get_by_type(self, credential_type: str, is_active: bool = True) -> List[CredentialModel]:
        """Return all credentials of the given type, newest first.

        Args:
            credential_type: Credential type ('database', 'api', 'minio', etc.)
            is_active: Filter by active status; pass None to skip the filter.

        Returns:
            List of credential models.
        """
        query = select(self.model).where(self.model.credential_type == credential_type)

        if is_active is not None:
            query = query.where(self.model.is_active == is_active)

        query = query.order_by(self.model.created_at.desc())
        result = await db.session.execute(query)
        return result.scalars().all()

    async def get_default_by_type(self, credential_type: str, is_active: bool = True) -> Optional[CredentialModel]:
        """Return the default credential of the given type.

        Args:
            credential_type: Credential type ('database', 'api', 'minio', etc.)
            is_active: Filter by active status; pass None to skip the filter.

        Returns:
            Credential model, or None when not found.
        """
        # Idiom fix: .is_(True) instead of == True for the boolean column.
        query = select(self.model).where(
            self.model.credential_type == credential_type, self.model.is_default.is_(True)
        )

        if is_active is not None:
            query = query.where(self.model.is_active == is_active)

        result = await db.session.execute(query)
        return result.scalars().first()

    async def set_default(self, credential_id: UUID, updated_by: UUID) -> bool:
        """Mark one credential as default for its type, unsetting all others.

        Args:
            credential_id: ID of the credential to make default.
            updated_by: ID of the user performing the update.

        Returns:
            True on success, False when the credential does not exist.
        """
        # Look up the credential so we know which type to demote.
        cred = await self.find_by_id(credential_id)
        if not cred:
            return False

        # Reset the default flag on every credential of the same type.
        reset_query = (
            update(self.model)
            .where(self.model.credential_type == cred.credential_type)
            .values(is_default=False, updated_by=updated_by)
        )
        await db.session.execute(reset_query)

        # Promote the requested credential.
        set_query = (
            update(self.model).where(self.model.id == credential_id).values(is_default=True, updated_by=updated_by)
        )
        await db.session.execute(set_query)
        await db.session.commit()

        return True

    async def update_last_used(self, credential_id: UUID, user_id: UUID) -> bool:
        """Stamp the last-used timestamp and user on a credential.

        Args:
            credential_id: ID of the credential that was used.
            user_id: ID of the user who used it.

        Returns:
            True (no row-count verification is performed).
        """
        update_query = (
            update(self.model)
            .where(self.model.id == credential_id)
            .values(last_used_at=datetime.now(timezone(settings.TIMEZONE)), last_used_by=user_id)
        )

        await db.session.execute(update_query)
        await db.session.commit()

        return True
|
||||||
6
app/repositories/feedback_repository.py
Normal file
6
app/repositories/feedback_repository.py
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
from app.models.feedback_model import FeedbackModel
|
||||||
|
from app.repositories.base import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class FeedbackRepository(BaseRepository):
    """Repository for user feedback records.

    Fix: the original only set a class attribute ``model = FeedbackModel``
    but BaseRepository requires the model via ``__init__``, so
    ``FeedbackRepository()`` raised TypeError and ``self.model`` was never
    initialised through the base class. An ``__init__`` with a
    backward-compatible default is added, matching the sibling repositories.
    """

    model = FeedbackModel  # kept for backward compatibility with existing references

    def __init__(self, model=FeedbackModel):
        super().__init__(model)
|
||||||
16
app/repositories/file_repository.py
Normal file
16
app/repositories/file_repository.py
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from app.models import FileModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class FileRepository(BaseRepository[FileModel]):
    """Repository for uploaded file records."""

    def __init__(self, model):
        super().__init__(model)

    async def find_by_user_id(self, user_id):
        """Return all file records owned by *user_id*.

        Bug fix: the original selected only the ``user_id`` column
        (``select(self.model.user_id)``), so callers received a list of
        repeated ids instead of FileModel rows. The int annotation on
        ``user_id`` was also dropped — ids elsewhere in this codebase are
        UUIDs.
        """
        query = select(self.model).where(self.model.user_id == user_id)
        result = await db.session.execute(query)
        return result.scalars().all()
|
||||||
39
app/repositories/map_access_repository.py
Normal file
39
app/repositories/map_access_repository.py
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import select
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
from app.models import MapAccessModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class MapAccessRepository(BaseRepository[MapAccessModel]):
    """Repository for mapset access grants (per user or per organization)."""

    def __init__(self, model):
        super().__init__(model)

    async def _select_where(self, *conditions):
        # Shared helper: run a filtered SELECT and return all matching rows.
        stmt = select(self.model).where(*conditions)
        outcome = await db.session.execute(stmt)
        return outcome.scalars().all()

    async def find_by_mapset(self, mapset_id: UUID):
        """All access rows attached to one mapset."""
        return await self._select_where(self.model.mapset_id == mapset_id)

    async def find_by_user(self, user_id: UUID):
        """All access rows granted directly to one user."""
        return await self._select_where(self.model.user_id == user_id)

    async def find_by_organization(self, organization_id: UUID):
        """All access rows granted to one organization."""
        return await self._select_where(self.model.organization_id == organization_id)

    async def find_user_access_to_mapset(self, mapset_id: UUID, user_id: UUID):
        """Access rows linking a specific user to a specific mapset."""
        return await self._select_where(
            self.model.mapset_id == mapset_id, self.model.user_id == user_id
        )

    async def find_organization_access_to_mapset(self, mapset_id: UUID, organization_id: UUID):
        """Access rows linking a specific organization to a specific mapset."""
        return await self._select_where(
            self.model.mapset_id == mapset_id, self.model.organization_id == organization_id
        )
|
||||||
8
app/repositories/map_projection_system_repository.py
Normal file
8
app/repositories/map_projection_system_repository.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from app.models import MapProjectionSystemModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class MapProjectionSystemRepository(BaseRepository[MapProjectionSystemModel]):
    """Repository for MapProjectionSystemModel; all CRUD inherited from BaseRepository."""

    def __init__(self, model):
        super().__init__(model)
|
||||||
8
app/repositories/map_source_repository.py
Normal file
8
app/repositories/map_source_repository.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from app.models import MapSourceModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class MapSourceRepository(BaseRepository[MapSourceModel]):
    """Repository for MapSourceModel; all CRUD inherited from BaseRepository."""

    def __init__(self, model):
        super().__init__(model)
|
||||||
28
app/repositories/map_source_usage_repository.py
Normal file
28
app/repositories/map_source_usage_repository.py
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import delete
|
||||||
|
|
||||||
|
from app.models import SourceUsageModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class SourceUsageRepository(BaseRepository[SourceUsageModel]):
    """Repository for mapset source-usage rows."""

    def __init__(self, model):
        super().__init__(model)

    async def bulk_update(self, mapset_id: UUID, data: List[Dict[str, Any]]) -> None:
        """Replace every source-usage row of a mapset (delete-then-insert)."""
        try:
            # Drop the mapset's existing rows, then insert the replacements
            # in a single transaction.
            await db.session.execute(
                delete(self.model).where(self.model.mapset_id == mapset_id)
            )

            replacements = [self.model(**payload) for payload in data]
            db.session.add_all(replacements)

            await db.session.commit()
        except Exception as exc:
            # Roll back so the session stays usable after a failed replace.
            await db.session.rollback()
            raise exc
|
||||||
8
app/repositories/mapset_history_repository.py
Normal file
8
app/repositories/mapset_history_repository.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from app.models import MapsetHistoryModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetHistoryRepository(BaseRepository[MapsetHistoryModel]):
    """Repository for MapsetHistoryModel; all CRUD inherited from BaseRepository."""

    def __init__(self, model):
        super().__init__(model)
|
||||||
258
app/repositories/mapset_repository.py
Normal file
258
app/repositories/mapset_repository.py
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
from ast import Dict
|
||||||
|
from typing import List, Optional, Tuple
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import Integer, String, and_, cast, func, or_, select, update
|
||||||
|
from sqlalchemy.orm import selectinload
|
||||||
|
|
||||||
|
from app.models import (
|
||||||
|
ClassificationModel,
|
||||||
|
MapAccessModel,
|
||||||
|
MapsetModel,
|
||||||
|
OrganizationModel,
|
||||||
|
)
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetRepository(BaseRepository[MapsetModel]):
|
||||||
|
    def __init__(self, model):
        # Delegate model registration to the generic BaseRepository.
        super().__init__(model)
|
||||||
|
|
||||||
|
    async def find_all(
        self,
        user: UserSchema = None,
        filters: list = None,
        sort: list = ...,
        search: str = "",
        group_by: str = None,
        limit: int = 100,
        offset: int = 0,
        landing: bool = False,
    ) -> Tuple[List[MapsetModel], int]:
        """Paginated mapset listing with per-user access control.

        Visibility rules:
        - Anonymous users (``user is None``) and the landing page only see
          approved, active mapsets with an open classification.
        - Administrators and data validators see everything.
        - Other users see mapsets produced by their organization, or shared
          with them / their organization via MapAccessModel.

        Returns a ``(records, total)`` tuple; ``total`` ignores limit/offset.
        NOTE(review): ``sort`` defaults to the Ellipsis sentinel; both ``[]``
        and ``...`` fall back to ordering by ``self.model.order``.
        """

        # DISTINCT because the MapAccessModel outer join below can produce
        # duplicate mapset rows.
        base_query = select(self.model).distinct()

        base_query = base_query.join(ClassificationModel, self.model.classification_id == ClassificationModel.id)

        # Non-privileged users need the access table to evaluate shares.
        if user and user.role.name not in {"administrator", "data_validator"}:
            base_query = base_query.join(MapAccessModel, self.model.id == MapAccessModel.mapset_id, isouter=True)

        if (user is None) or landing:
            # Public view: open classification, active, approved.
            base_query = base_query.filter(ClassificationModel.is_open == True)
            base_query = base_query.filter(self.model.is_active == True)
            base_query = base_query.filter(self.model.status_validation == "approved")
        elif user.role.name not in {"administrator", "data_validator"}:
            # Restricted view: own organization's mapsets or explicit grants.
            # The commented classification conditions are intentionally
            # disabled — do not remove without confirming with the author.
            base_query = base_query.filter(
                or_(
                    # ClassificationModel.is_limited.is_(True),
                    # ClassificationModel.is_open.is_(True),
                    and_(
                        # ClassificationModel.is_secret.is_(True),
                        self.model.producer_id == user.organization.id,
                    ),
                    and_(
                        # ClassificationModel.is_secret.is_(True),
                        MapAccessModel.organization_id == user.organization.id,
                    ),
                    and_(
                        # ClassificationModel.is_secret.is_(True),
                        MapAccessModel.user_id == user.id,
                    ),
                )
            )

        if filters:
            base_query = base_query.filter(*filters)

        # Search covers every mapset column plus the producer's name.
        if search:
            base_query = base_query.join(
                OrganizationModel,
                self.model.producer_id == OrganizationModel.id,
                isouter=True,
            ).filter(
                or_(
                    *[
                        cast(getattr(self.model, col), String).ilike(f"%{search}%")
                        for col in self.model.__table__.columns.keys()
                    ],
                    OrganizationModel.name.ilike(f"%{search}%"),
                )
            )

        if group_by:
            base_query = base_query.group_by(getattr(self.model, group_by))

        # Total computed on the filtered query, before pagination.
        count_query = select(func.count()).select_from(base_query.subquery())
        total = await db.session.scalar(count_query)

        # Default ordering by the display-order column when no sort given.
        if not sort or sort is ...:
            base_query = base_query.order_by(self.model.order.asc())
        else:
            base_query = base_query.order_by(*sort)

        base_query = base_query.limit(limit).offset(offset)

        result = await db.session.execute(base_query)
        result = result.scalars().all()

        return result, total
|
||||||
|
|
||||||
|
async def find_all_group_by_organization(
|
||||||
|
self,
|
||||||
|
user: Optional[UserSchema] = None,
|
||||||
|
mapset_filters: list = None,
|
||||||
|
organization_filters: list = None,
|
||||||
|
sort: list = None,
|
||||||
|
search: str = "",
|
||||||
|
limit: int = 100,
|
||||||
|
offset: int = 0,
|
||||||
|
) -> Tuple[List[Dict], int]:
|
||||||
|
|
||||||
|
mapset_filters = mapset_filters or []
|
||||||
|
organization_filters = organization_filters or []
|
||||||
|
sort = sort or [OrganizationModel.name.asc()]
|
||||||
|
|
||||||
|
if user is None:
|
||||||
|
base_mapset_query = (
|
||||||
|
select(self.model)
|
||||||
|
.join(
|
||||||
|
ClassificationModel,
|
||||||
|
self.model.classification_id == ClassificationModel.id,
|
||||||
|
)
|
||||||
|
.filter(ClassificationModel.is_open.is_(True))
|
||||||
|
)
|
||||||
|
elif user.role in {"administrator", "data-validator"}:
|
||||||
|
base_mapset_query = select(self.model)
|
||||||
|
else:
|
||||||
|
user_org_id = user.organization.id if user.organization else None
|
||||||
|
|
||||||
|
base_mapset_query = (
|
||||||
|
select(self.model)
|
||||||
|
.join(
|
||||||
|
ClassificationModel,
|
||||||
|
self.model.classification_id == ClassificationModel.id,
|
||||||
|
)
|
||||||
|
.outerjoin(MapAccessModel, self.model.id == MapAccessModel.mapset_id)
|
||||||
|
.filter(
|
||||||
|
or_(
|
||||||
|
ClassificationModel.is_open.is_(True),
|
||||||
|
ClassificationModel.is_limited.is_(True),
|
||||||
|
and_(
|
||||||
|
ClassificationModel.is_secret.is_(True),
|
||||||
|
self.model.producer_id == user.id,
|
||||||
|
),
|
||||||
|
and_(
|
||||||
|
ClassificationModel.is_secret.is_(True),
|
||||||
|
MapAccessModel.user_id == user.id,
|
||||||
|
),
|
||||||
|
and_(
|
||||||
|
ClassificationModel.is_secret.is_(True),
|
||||||
|
user_org_id is not None,
|
||||||
|
MapAccessModel.organization_id == user_org_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
filtered_mapset_query = base_mapset_query
|
||||||
|
if mapset_filters:
|
||||||
|
filtered_mapset_query = filtered_mapset_query.filter(*mapset_filters)
|
||||||
|
|
||||||
|
if search:
|
||||||
|
search_filters = []
|
||||||
|
for col in self.model.__table__.columns.keys():
|
||||||
|
if hasattr(self.model, col):
|
||||||
|
search_filters.append(cast(getattr(self.model, col), String).ilike(f"%{search}%"))
|
||||||
|
|
||||||
|
if search_filters:
|
||||||
|
filtered_mapset_query = filtered_mapset_query.filter(or_(*search_filters))
|
||||||
|
|
||||||
|
producer_ids_subquery = select(self.model.producer_id).select_from(filtered_mapset_query.subquery()).distinct()
|
||||||
|
|
||||||
|
org_query = select(OrganizationModel).filter(OrganizationModel.id.in_(producer_ids_subquery))
|
||||||
|
|
||||||
|
if organization_filters:
|
||||||
|
org_query = org_query.filter(*organization_filters)
|
||||||
|
|
||||||
|
if search:
|
||||||
|
org_search_filters = []
|
||||||
|
for col in OrganizationModel.__table__.columns.keys():
|
||||||
|
if hasattr(OrganizationModel, col):
|
||||||
|
org_search_filters.append(cast(getattr(OrganizationModel, col), String).ilike(f"%{search}%"))
|
||||||
|
|
||||||
|
if org_search_filters:
|
||||||
|
org_query = org_query.filter(or_(*org_search_filters))
|
||||||
|
|
||||||
|
count_query = select(func.count()).select_from(
|
||||||
|
select(OrganizationModel.id).select_from(org_query.subquery()).distinct()
|
||||||
|
)
|
||||||
|
total = await db.session.scalar(count_query)
|
||||||
|
|
||||||
|
org_query = org_query.order_by(*sort)
|
||||||
|
|
||||||
|
if limit:
|
||||||
|
org_query = org_query.limit(limit)
|
||||||
|
if offset:
|
||||||
|
org_query = org_query.offset(offset)
|
||||||
|
|
||||||
|
org_result = await db.session.execute(org_query)
|
||||||
|
organizations = org_result.scalars().unique().all()
|
||||||
|
|
||||||
|
org_ids = [org.id for org in organizations]
|
||||||
|
|
||||||
|
if not org_ids:
|
||||||
|
return [], total
|
||||||
|
|
||||||
|
all_mapsets_query = filtered_mapset_query.filter(self.model.producer_id.in_(org_ids)).options(
|
||||||
|
selectinload(self.model.classification)
|
||||||
|
)
|
||||||
|
|
||||||
|
all_mapsets_result = await db.session.execute(all_mapsets_query)
|
||||||
|
all_mapsets = all_mapsets_result.scalars().unique().all()
|
||||||
|
|
||||||
|
mapsets_by_org = {}
|
||||||
|
for mapset in all_mapsets:
|
||||||
|
if mapset.producer_id not in mapsets_by_org:
|
||||||
|
mapsets_by_org[mapset.producer_id] = []
|
||||||
|
mapsets_by_org[mapset.producer_id].append(mapset)
|
||||||
|
|
||||||
|
result_data = []
|
||||||
|
for org in organizations:
|
||||||
|
org_mapsets = mapsets_by_org.get(org.id, [])
|
||||||
|
|
||||||
|
result_data.append(
|
||||||
|
{
|
||||||
|
"id": org.id,
|
||||||
|
"name": org.name,
|
||||||
|
"mapsets": org_mapsets,
|
||||||
|
"found": len(org_mapsets),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return result_data, total
|
||||||
|
|
||||||
|
async def bulk_update_activation(self, mapset_ids: List[UUID], is_active: bool) -> None:
|
||||||
|
for mapset_id in mapset_ids:
|
||||||
|
await db.session.execute(update(self.model).where(self.model.id == mapset_id).values(is_active=is_active))
|
||||||
|
await db.session.commit()
|
||||||
|
|
||||||
|
async def increment_view_count(self, mapset_id: UUID) -> None:
|
||||||
|
query = (
|
||||||
|
update(self.model)
|
||||||
|
.where(self.model.id == mapset_id)
|
||||||
|
.values(view_count=self.model.view_count + 1)
|
||||||
|
)
|
||||||
|
await db.session.execute(query)
|
||||||
|
await db.session.commit()
|
||||||
|
|
||||||
|
async def increment_download_count(self, mapset_id: UUID) -> None:
|
||||||
|
query = (
|
||||||
|
update(self.model)
|
||||||
|
.where(self.model.id == mapset_id)
|
||||||
|
.values(download_count=self.model.download_count + 1)
|
||||||
|
)
|
||||||
|
await db.session.execute(query)
|
||||||
|
await db.session.commit()
|
||||||
19
app/repositories/news_repository.py
Normal file
19
app/repositories/news_repository.py
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
from typing import List
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import update
|
||||||
|
|
||||||
|
from app.models import NewsModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class NewsRepository(BaseRepository[NewsModel]):
    """Repository for ``NewsModel`` rows."""

    def __init__(self, model):
        super().__init__(model)

    async def bulk_update_activation(self, news_ids: List[UUID], is_active: bool) -> None:
        """Set ``is_active`` on every news row in ``news_ids`` and commit.

        PERF: the original looped and issued one UPDATE per id; a single
        ``UPDATE ... WHERE id IN (...)`` is equivalent and avoids
        ``len(news_ids)`` round-trips.
        """
        if news_ids:
            await db.session.execute(
                update(self.model)
                .where(self.model.id.in_(news_ids))
                .values(is_active=is_active)
            )
        await db.session.commit()
|
||||||
318
app/repositories/organization_repository.py
Normal file
318
app/repositories/organization_repository.py
Normal file
|
|
@ -0,0 +1,318 @@
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple, override
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import (
|
||||||
|
Integer,
|
||||||
|
Numeric,
|
||||||
|
String,
|
||||||
|
Unicode,
|
||||||
|
UnicodeText,
|
||||||
|
and_,
|
||||||
|
cast,
|
||||||
|
exists,
|
||||||
|
func,
|
||||||
|
or_,
|
||||||
|
select,
|
||||||
|
desc,
|
||||||
|
text,
|
||||||
|
update as sqlalchemy_update
|
||||||
|
)
|
||||||
|
|
||||||
|
from app.models.classification_model import ClassificationModel
|
||||||
|
from app.models.map_access_model import MapAccessModel
|
||||||
|
from app.models.mapset_model import MapsetModel
|
||||||
|
from app.models.organization_model import OrganizationModel
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class OrganizationRepository(BaseRepository[OrganizationModel]):
    """Repository for ``OrganizationModel`` with mapset-aware queries.

    Read queries outer-join ``MapsetModel`` (via ``producer_id``) so each
    organization row carries a ``count_mapset`` aggregate, and apply the
    classification / map-access visibility rules of the requesting user.
    """

    def __init__(self, model, mapset_model: MapsetModel):
        super().__init__(model)
        # Kept so count/visibility queries can join mapsets by producer_id.
        self.mapset_model = mapset_model

    async def flag_delete_organization(self, id):
        """Soft-delete an organization by flagging ``is_deleted``.

        BUGFIX: the original body was ``return await
        self.flag_delete_organization(id)`` — unconditional self-recursion
        that always raised ``RecursionError``.  Flag the row through the
        repository's own ``update`` instead.
        """
        return await self.update(id, {"is_deleted": True})

    async def find_by_name(self, name: str, sensitive: bool = False):
        """Look up a single organization by name.

        ``sensitive=False`` (default): case-insensitive exact match.
        ``sensitive=True``: substring match via ILIKE (behavior preserved
        from the original; NOTE(review): ILIKE is itself case-insensitive —
        confirm the intended semantics of this flag).

        BUGFIX: the original lower-cased the *input* and then compared it
        with ``==`` against the stored, possibly mixed-case, column value,
        so the case-insensitive branch missed rows.  Lower-case both sides.
        """
        query = select(self.model)
        if not sensitive:
            query = query.where(func.lower(self.model.name) == name.lower())
        else:
            query = query.where(self.model.name.ilike(f"%{name}%"))

        result = await db.session.execute(query)
        return result.scalar_one_or_none()

    async def find_all(
        self,
        user: UserSchema | None,
        filters: list,
        sort: list | None = None,
        search: str = "",
        group_by: str = None,
        limit: int = 100,
        offset: int = 0,
        landing: bool = False,
    ) -> Tuple[List[OrganizationModel], int]:
        """Find all organizations with pagination and a mapset count.

        Returns ``(rows, total)`` where each row is a mapping that includes
        the ``count_mapset`` aggregate.  ``total`` counts organizations
        only (no mapset join), so organizations with zero mapsets are
        included in the total.
        """
        if sort is None:
            sort = []

        mapset_count = func.count(self.mapset_model.id).label("count_mapset")

        base_query = select(
            self.model.id,
            self.model.name,
            self.model.description,
            self.model.thumbnail,
            self.model.address,
            self.model.phone_number,
            self.model.email,
            self.model.website,
            mapset_count,
            self.model.is_active,
            self.model.is_deleted,
            self.model.created_at,
            self.model.modified_at,
        ).select_from(self.model)

        # Outer joins keep organizations that have no mapsets at all.
        base_query = base_query.outerjoin(
            self.mapset_model,
            self.model.id == self.mapset_model.producer_id,
        )
        base_query = base_query.outerjoin(
            ClassificationModel,
            self.mapset_model.classification_id == ClassificationModel.id,
        )

        # Mapset-level conditions apply only to the joined mapset rows;
        # NULL mapset ids (organizations without mapsets) always pass.
        mapset_conditions = [
            or_(
                self.mapset_model.id.is_(None),
                and_(
                    self.mapset_model.is_active.is_(True),
                    self.mapset_model.is_deleted.is_(False),
                ),
            )
        ]

        # When landing=True, count mapsets as the public landing page does,
        # regardless of who is asking.
        if (user is None) or landing:
            mapset_conditions.append(
                or_(
                    self.mapset_model.id.is_(None),
                    and_(
                        self.mapset_model.status_validation == "approved",
                        ClassificationModel.is_open.is_(True),
                    ),
                )
            )
        elif user.role not in {"administrator", "data_validator"}:
            # Non-admin: restrict to granted map-access rows and
            # open/limited classifications (plus own-org secrets).
            base_query = base_query.outerjoin(
                MapAccessModel,
                and_(
                    self.mapset_model.id == MapAccessModel.mapset_id,
                    or_(
                        MapAccessModel.organization_id == user.organization.id,
                        MapAccessModel.user_id == user.id,
                    ),
                ),
            )
            mapset_conditions.append(
                or_(
                    self.mapset_model.id.is_(None),
                    ClassificationModel.is_limited.is_(True),
                    ClassificationModel.is_open.is_(True),
                    and_(
                        ClassificationModel.is_secret.is_(True),
                        self.mapset_model.producer_id == user.organization.id,
                    ),
                )
            )

        base_query = base_query.where(and_(*mapset_conditions))

        if hasattr(self.model, "is_deleted"):
            base_query = base_query.where(self.model.is_deleted.is_(False))

        if filters:
            base_query = base_query.where(*filters)

        # Free-text search over string columns; numeric columns match only
        # when the search term parses as a number.
        search_filters = []
        if search:
            for col in self.model.__table__.columns.keys():
                column = getattr(self.model, col)
                if isinstance(column.type, (String, Unicode, UnicodeText)):
                    search_filters.append(column.ilike(f"%{search}%"))
                elif isinstance(column.type, (Integer, Numeric)):
                    try:
                        num_val = float(search)
                        search_filters.append(cast(column, String) == str(num_val))
                    except (ValueError, TypeError):
                        pass
            if search_filters:
                base_query = base_query.where(or_(*search_filters))

        # Group by the organization id (plus any requested extra column)
        # so the count aggregate is per organization.
        group_columns = [self.model.id]
        if group_by and hasattr(self.model, group_by):
            group_col = getattr(self.model, group_by)
            if group_col not in group_columns:
                group_columns.append(group_col)
        base_query = base_query.group_by(*group_columns)

        # Count over organizations only — no mapset join — so that
        # organizations with zero visible mapsets are still counted.
        count_query = select(func.count(self.model.id)).select_from(self.model)
        if hasattr(self.model, "is_deleted"):
            count_query = count_query.where(self.model.is_deleted.is_(False))
        if filters:
            count_query = count_query.where(*filters)
        if search and search_filters:
            count_query = count_query.where(or_(*search_filters))

        total = await db.session.scalar(count_query)

        if sort:
            base_query = base_query.order_by(*sort)
        else:
            # Default ordering: organizations with the most mapsets first.
            base_query = base_query.order_by(desc(mapset_count))

        base_query = base_query.limit(limit).offset(offset)

        result = await db.session.execute(base_query)
        items = result.mappings().all()

        return items, total

    @override
    async def update(self, id: UUID, data: Dict[str, Any], refresh: bool = True) -> Optional[OrganizationModel]:
        """Update a record, treating ``None`` values as "unchanged".

        Returns the refreshed row (``find_by_id``) when ``refresh`` is
        True, ``None`` when nothing matched or ``refresh`` is False.
        """
        clean_data = {k: v for k, v in data.items() if v is not None}

        if not clean_data:
            # Nothing to change — optionally return the current row.
            return await self.find_by_id(None, id) if refresh else None

        query = (
            sqlalchemy_update(self.model)
            .where(self.model.id == id)
            .values(**clean_data)
            .execution_options(synchronize_session="fetch")
        )

        result = await db.session.execute(query)
        await db.session.commit()

        if result.rowcount == 0:
            return None

        return await self.find_by_id(None, id) if refresh else None

    @override
    async def find_by_id(self, user: UserSchema | None, id: UUID) -> Optional[OrganizationModel]:
        """Fetch one organization (as a row mapping) with ``count_mapset``.

        The mapset visibility filter depends on the caller: anonymous
        sees approved+active mapsets only; administrator/data_validator
        see all active mapsets; other users see open/limited mapsets plus
        secret ones reachable via producer or map-access grants.
        """
        if user is None:
            mapset_condition = and_(
                self.mapset_model.is_active.is_(True),
                self.mapset_model.is_deleted.is_(False),
                self.mapset_model.status_validation == "approved",
                self.mapset_model.producer_id == self.model.id,
            )
            mapset_filter = or_(mapset_condition, self.mapset_model.id.is_(None))
        elif user.role in {"administrator", "data_validator"}:
            mapset_condition = and_(
                self.mapset_model.is_active.is_(True),
                self.mapset_model.is_deleted.is_(False),
                self.mapset_model.producer_id == self.model.id,
            )
            mapset_filter = or_(mapset_condition, self.mapset_model.id.is_(None))
        else:
            mapset_condition = and_(
                or_(
                    ClassificationModel.is_limited.is_(True),
                    ClassificationModel.is_open.is_(True),
                    and_(
                        ClassificationModel.is_secret.is_(True),
                        self.mapset_model.producer_id == user.organization.id,
                    ),
                    and_(
                        ClassificationModel.is_secret.is_(True),
                        MapAccessModel.organization_id == user.organization.id,
                    ),
                    and_(
                        ClassificationModel.is_secret.is_(True),
                        MapAccessModel.user_id == user.id,
                    ),
                ),
                self.mapset_model.is_active.is_(True),
                self.mapset_model.is_deleted.is_(False),
                self.mapset_model.producer_id == self.model.id,
            )
            mapset_filter = or_(mapset_condition, self.mapset_model.id.is_(None))

        query = (
            select(
                self.model.id,
                self.model.name,
                self.model.description,
                self.model.thumbnail,
                self.model.address,
                self.model.phone_number,
                self.model.email,
                self.model.website,
                func.count(self.mapset_model.id).label("count_mapset"),
                self.model.is_active,
                self.model.is_deleted,
                self.model.created_at,
                self.model.modified_at,
            )
            .outerjoin(self.mapset_model, self.model.id == self.mapset_model.producer_id)
            .outerjoin(
                ClassificationModel,
                self.mapset_model.classification_id == ClassificationModel.id,
            )
        )

        # Map-access join is only needed for non-admin users.
        if user is not None and user.role not in {"administrator", "data_validator"}:
            query = query.outerjoin(MapAccessModel, self.mapset_model.id == MapAccessModel.mapset_id)

        # Admin/validator rows are not visibility-filtered at all
        # (behavior preserved: their mapset_filter is never applied).
        if user is None or user.role not in {"administrator", "data_validator"}:
            query = query.where(mapset_filter)

        if hasattr(self.model, "is_deleted"):
            query = query.filter(self.model.is_deleted.is_(False))

        query = query.filter(self.model.id == id)
        query = query.group_by(self.model.id)
        result = await db.session.execute(query)
        return result.mappings().one_or_none()
|
||||||
8
app/repositories/regional_repository.py
Normal file
8
app/repositories/regional_repository.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from app.models import RegionalModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class RegionalRepository(BaseRepository[RegionalModel]):
    """Repository for ``RegionalModel``; inherits all CRUD from the base."""

    def __init__(self, model):
        # No extra state — delegate straight to the generic base repository.
        super().__init__(model)
|
||||||
24
app/repositories/role_repository.py
Normal file
24
app/repositories/role_repository.py
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from app.models import RoleModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class RoleRepository(BaseRepository[RoleModel]):
    """Repository exposing name-based lookups for ``RoleModel``."""

    def __init__(self, model):
        super().__init__(model)

    async def find_by_name(self, name: str) -> RoleModel:
        """Find record by name."""
        stmt = select(self.model).filter(self.model.name == name)
        found = await db.session.execute(stmt)
        return found.scalar_one_or_none()

    async def get_list_by_names(self, name: List[str]) -> List[RoleModel]:
        """Return every role whose name appears in ``name``."""
        stmt = select(self.model).where(self.model.name.in_(name))
        found = await db.session.execute(stmt)
        return found.scalars().all()
|
||||||
39
app/repositories/token_repository.py
Normal file
39
app/repositories/token_repository.py
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from pytz import timezone
|
||||||
|
from sqlalchemy import select
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.models import RefreshTokenModel
|
||||||
|
from app.repositories import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class TokenRepository(BaseRepository[RefreshTokenModel]):
    """Repository for refresh tokens: lookup, validity check, revocation."""

    def __init__(self, model):
        super().__init__(model)

    async def find_valid_token(self, token: str, user_id: UUID):
        """Return the matching non-revoked, unexpired token row, or None.

        Expiry is compared against "now" in the configured app timezone.
        """
        query = select(self.model).where(
            self.model.token == token,
            self.model.user_id == str(user_id),
            self.model.expires_at > datetime.now(timezone(settings.TIMEZONE)),
            # IDIOM FIX: `.is_(False)` instead of `== False` (flake8 E712);
            # emits `revoked IS false` — same rows selected.
            self.model.revoked.is_(False),
        )
        result = await db.session.execute(query)
        return result.scalars().first()

    async def revoke_token(self, token: str):
        """Mark the token revoked. Returns True if found, False otherwise."""
        token_obj = await self.find_by_token(token)
        if token_obj:
            token_obj.revoked = True
            db.session.add(token_obj)
            await db.session.commit()
            return True
        return False

    async def find_by_token(self, token: str):
        """Return the token row matching the raw token string, or None."""
        query = select(self.model).where(self.model.token == token)
        result = await db.session.execute(query)
        return result.scalars().first()
|
||||||
41
app/repositories/user_repository.py
Normal file
41
app/repositories/user_repository.py
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi_async_sqlalchemy import db
|
||||||
|
from sqlalchemy import select, update
|
||||||
|
from uuid6 import UUID
|
||||||
|
|
||||||
|
from app.models import UserModel
|
||||||
|
|
||||||
|
from . import BaseRepository
|
||||||
|
|
||||||
|
|
||||||
|
class UserRepository(BaseRepository[UserModel]):
    """Repository for ``UserModel`` lookups and bulk activation."""

    def __init__(self, model):
        super().__init__(model)

    async def find_by_username(self, username: str) -> UserModel | None:
        """Return the user with this exact username, or None."""
        query = select(self.model).filter(self.model.username == username)
        result = await db.session.execute(query)
        return result.scalar_one_or_none()

    async def find_by_email(self, email: str) -> UserModel | None:
        """Return the user with this exact email, or None."""
        query = select(self.model).filter(self.model.email == email)
        result = await db.session.execute(query)
        return result.scalar_one_or_none()

    async def find_by_id(self, id: UUID) -> UserModel | None:
        """Return the user with this id, or None."""
        query = select(self.model).filter(self.model.id == id)
        result = await db.session.execute(query)
        return result.scalar_one_or_none()

    async def find_all_ids(self, list_id: List) -> List[UUID]:
        """Return the subset of ``list_id`` that exists as user ids.

        BUGFIX (annotation only): the query selects ``self.model.id``, so
        the result is a list of ids, not ``UserModel`` instances — the
        original annotation said ``List[UserModel]``.
        """
        query = select(self.model.id)
        query = query.where(self.model.id.in_(list_id))

        result = await db.session.execute(query)
        return result.scalars().all()

    async def bulk_update_activation(self, user_ids: List[UUID], is_active: bool) -> None:
        """Set ``is_active`` on every user in ``user_ids`` and commit.

        PERF: the original looped one UPDATE per id; a single
        ``UPDATE ... WHERE id IN (...)`` is equivalent in one round-trip.
        """
        if user_ids:
            await db.session.execute(
                update(self.model)
                .where(self.model.id.in_(user_ids))
                .values(is_active=is_active)
            )
        await db.session.commit()
|
||||||
80
app/schemas/__init__.py
Normal file
80
app/schemas/__init__.py
Normal file
|
|
@ -0,0 +1,80 @@
|
||||||
|
from .category_schema import CategoryCreateSchema, CategorySchema, CategoryUpdateSchema
|
||||||
|
from .classification_schema import (
|
||||||
|
ClassificationCreateSchema,
|
||||||
|
ClassificationSchema,
|
||||||
|
ClassificationUpdateSchema,
|
||||||
|
)
|
||||||
|
from .credential_schema import (
|
||||||
|
CredentialCreateSchema,
|
||||||
|
CredentialSchema,
|
||||||
|
CredentialUpdateSchema,
|
||||||
|
)
|
||||||
|
from .file_schema import FileSchema
|
||||||
|
from .map_access_schema import (
|
||||||
|
MapAccessCreateSchema,
|
||||||
|
MapAccessSchema,
|
||||||
|
MapAccessUpdateSchema,
|
||||||
|
)
|
||||||
|
from .map_projection_system_schema import (
|
||||||
|
MapProjectionSystemCreateSchema,
|
||||||
|
MapProjectionSystemSchema,
|
||||||
|
MapProjectionSystemUpdateSchema,
|
||||||
|
)
|
||||||
|
from .map_source_schema import (
|
||||||
|
MapSourceCreateSchema,
|
||||||
|
MapSourceSchema,
|
||||||
|
MapSourceUpdateSchema,
|
||||||
|
)
|
||||||
|
from .mapset_history_schema import MapsetHistoryCreateSchema, MapsetHistorySchema
|
||||||
|
from .mapset_schema import MapsetCreateSchema, MapsetSchema, MapsetUpdateSchema
|
||||||
|
from .news_schema import NewsCreateSchema, NewsSchema, NewsUpdateSchema
|
||||||
|
from .organization_schema import (
|
||||||
|
OrganizationCreateSchema,
|
||||||
|
OrganizationSchema,
|
||||||
|
OrganizationUpdateSchema,
|
||||||
|
)
|
||||||
|
from .regional_schema import RegionalCreateSchema, RegionalSchema, RegionalUpdateSchema
|
||||||
|
from .role_schema import RoleCreateSchema, RoleSchema, RoleUpdateSchema
|
||||||
|
from .user_schema import UserCreateSchema, UserSchema, UserUpdateSchema
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"OrganizationSchema",
|
||||||
|
"OrganizationCreateSchema",
|
||||||
|
"OrganizationUpdateSchema",
|
||||||
|
"UserSchema",
|
||||||
|
"UserCreateSchema",
|
||||||
|
"UserUpdateSchema",
|
||||||
|
"RoleSchema",
|
||||||
|
"RoleCreateSchema",
|
||||||
|
"RoleUpdateSchema",
|
||||||
|
"NewsSchema",
|
||||||
|
"NewsCreateSchema",
|
||||||
|
"NewsUpdateSchema",
|
||||||
|
"FileSchema",
|
||||||
|
"CredentialSchema",
|
||||||
|
"CredentialCreateSchema",
|
||||||
|
"CredentialUpdateSchema",
|
||||||
|
"MapsetSchema",
|
||||||
|
"MapsetCreateSchema",
|
||||||
|
"MapsetUpdateSchema",
|
||||||
|
"MapSourceSchema",
|
||||||
|
"MapSourceCreateSchema",
|
||||||
|
"MapSourceUpdateSchema",
|
||||||
|
"MapProjectionSystemSchema",
|
||||||
|
"MapProjectionSystemCreateSchema",
|
||||||
|
"MapProjectionSystemUpdateSchema",
|
||||||
|
"MapAccessSchema",
|
||||||
|
"MapAccessCreateSchema",
|
||||||
|
"MapAccessUpdateSchema",
|
||||||
|
"MapsetHistorySchema",
|
||||||
|
"MapsetHistoryCreateSchema",
|
||||||
|
"CategoryCreateSchema",
|
||||||
|
"CategorySchema",
|
||||||
|
"CategoryUpdateSchema",
|
||||||
|
"ClassificationSchema",
|
||||||
|
"ClassificationCreateSchema",
|
||||||
|
"ClassificationUpdateSchema",
|
||||||
|
"RegionalSchema",
|
||||||
|
"RegionalCreateSchema",
|
||||||
|
"RegionalUpdateSchema",
|
||||||
|
]
|
||||||
28
app/schemas/base.py
Normal file
28
app/schemas/base.py
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
from typing import Generic, List, TypeVar
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict
|
||||||
|
|
||||||
|
from app.utils.helpers import orjson_dumps
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
class BaseSchema(BaseModel):
    """Base Pydantic model with orjson configuration.

    All project schemas inherit from this class; it enables population by
    field name/alias and construction from ORM attributes.
    """

    model_config = ConfigDict(
        populate_by_name=True,   # accept both field names and aliases on input
        from_attributes=True,    # allow BaseSchema.model_validate(orm_obj)
    )

    def model_dump_json(self, **kwargs):
        """Override default json serialization to use orjson.

        NOTE(review): **kwargs are forwarded to ``model_dump()`` (field
        selection options such as exclude/include), not to the JSON
        encoder — serializer-only options like ``indent`` are unsupported.
        """
        return orjson_dumps(self.model_dump(**kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
class PaginatedResponse(BaseSchema, Generic[T]):
    """Generic envelope for paginated list endpoints."""

    items: List[T]     # the current page of results
    total: int         # total matching rows across all pages
    limit: int         # page size requested
    offset: int        # number of rows skipped before this page
    has_more: bool     # True when offset + len(items) < total (computed by caller)
|
||||||
30
app/schemas/category_schema.py
Normal file
30
app/schemas/category_schema.py
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class CategorySchema(BaseSchema):
    """Read/serialization schema for a mapset category."""

    id: UUID7Field
    name: str
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    count_mapset: int = 0    # number of mapsets in this category (aggregate)
    is_active: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryCreateSchema(BaseSchema):
    """Payload for creating a category; name must be non-empty."""

    name: str = Field(..., min_length=1)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryUpdateSchema(BaseSchema):
    """Partial-update payload for a category; omitted fields stay unchanged."""

    name: Optional[str] = Field(None, min_length=1)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: Optional[bool] = None
|
||||||
29
app/schemas/classification_schema.py
Normal file
29
app/schemas/classification_schema.py
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class ClassificationSchema(BaseSchema):
    """Read schema for a mapset classification (open / limited / secret flags)."""

    id: UUID7Field
    name: str
    is_open: bool      # publicly visible to anonymous users
    is_limited: bool   # visible to any authenticated user
    is_secret: bool    # restricted to producer org / explicit map-access grants
|
||||||
|
|
||||||
|
|
||||||
|
class ClassificationCreateSchema(BaseSchema):
    """Payload for creating a classification; all three flags are required."""

    name: str
    is_open: bool
    is_limited: bool
    is_secret: bool
|
||||||
|
|
||||||
|
|
||||||
|
class ClassificationUpdateSchema(BaseSchema):
    """Partial-update payload for a classification; omitted fields stay unchanged."""

    name: Optional[str] = Field(None)
    is_open: Optional[bool] = Field(None)
    is_limited: Optional[bool] = Field(None)
    is_secret: Optional[bool] = Field(None)
|
||||||
9
app/schemas/count_schema.py
Normal file
9
app/schemas/count_schema.py
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
from app.schemas.base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class CountSchema(BaseSchema):
    """Aggregate counters shown on the dashboard/landing page."""

    mapset_count: int
    organization_count: int
    visitor_count: int
    metadata_count: int
    download_count: int
|
||||||
212
app/schemas/credential_schema.py
Normal file
212
app/schemas/credential_schema.py
Normal file
|
|
@ -0,0 +1,212 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from pydantic import Field, field_validator
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.exceptions import UnprocessableEntity
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialBase(BaseSchema):
    """Common, non-sensitive credential fields shared by create/read schemas."""

    name: str = Field(..., description="Nama kredensial")
    description: Optional[str] = Field(None, description="Deskripsi kredensial")
    credential_type: str = Field(..., description="Tipe kredensial ('database', 'api', 'minio', dll)")
    credential_metadata: Optional[Dict[str, Any]] = Field(
        default={}, description="Metadata tidak sensitif (tidak dienkripsi)"
    )
    is_default: bool = Field(
        default=False, description="Apakah kredensial ini digunakan sebagai default untuk tipenya"
    )
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialSchema(BaseSchema):
    """Read schema for a credential — excludes the encrypted sensitive data.

    NOTE: Optional fields here have no defaults, so they are required-but-
    nullable in Pydantic v2 (they must be present when validating).
    """

    id: UUID7Field
    name: str
    description: Optional[str]
    credential_type: str
    credential_metadata: Optional[Dict[str, Any]]
    is_default: bool
    is_active: bool
    # Audit trail
    created_by: UUID7Field
    updated_by: Optional[UUID7Field]
    created_at: datetime
    updated_at: Optional[datetime]
    last_used_at: Optional[datetime]
    last_used_by: Optional[UUID7Field]
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialWithSensitiveDataSchema(BaseSchema):
    """CredentialSchema plus the decrypted secret payload.

    Only for trusted internal consumers — never expose on public endpoints.
    """

    id: UUID7Field
    name: str
    description: Optional[str]
    # Secret material, decrypted for the caller.
    decrypted_data: Dict[str, Any]
    credential_type: str
    credential_metadata: Optional[Dict[str, Any]]
    is_default: bool
    is_active: bool
    created_by: UUID7Field
    updated_by: Optional[UUID7Field]
    created_at: datetime
    updated_at: Optional[datetime]
    last_used_at: Optional[datetime]
    last_used_by: Optional[UUID7Field]
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialCreateSchema(CredentialBase):
    """Payload for creating a credential.

    Descriptive fields are inherited unchanged from CredentialBase (the
    previous version re-declared every one of them verbatim — removed).
    Adds the secret payload and an active flag, and validates that
    ``sensitive_data`` carries the keys required by ``credential_type``.
    """

    is_active: bool = Field(default=True, description="Status aktif kredensial")
    sensitive_data: Dict[str, Any] = Field(..., description="Data sensitif yang akan dienkripsi")

    @field_validator("credential_type")
    @classmethod
    def validate_credential_type(cls, v):
        """Restrict credential_type to the supported set."""
        allowed_types = {"database", "api", "minio", "ssh", "smtp", "ftp", "geoserver", "geonetwork"}
        if v not in allowed_types:
            raise UnprocessableEntity(f'credential_type harus salah satu dari: {", ".join(allowed_types)}')
        return v

    @field_validator("sensitive_data")
    @classmethod
    def validate_sensitive_data(cls, v, values):
        """Validasi data sensitif berdasarkan tipe kredensial."""
        credential_type = values.data.get("credential_type", "")

        # Required keys per type; replaces seven copy-pasted if/elif branches.
        # NOTE(review): "server" is listed here but is not an allowed
        # credential_type above, so that branch is unreachable — confirm intent.
        required_by_type = {
            "database": {"host", "port", "username", "password", "database_name"},
            "minio": {"endpoint", "access_key", "secret_key", "secure", "bucket_name"},
            "api": {"base_url", "api_key"},
            "ssh": {"host", "port", "username"},
            "smtp": {"host", "port", "username", "password", "use_tls"},
            "ftp": {"host", "port", "username", "password"},
            "server": {"host", "port", "username", "password"},
        }

        # SSH additionally needs at least one authentication secret.
        if credential_type == "ssh" and not ("password" in v or "private_key" in v):
            raise UnprocessableEntity("Either 'password' or 'private_key' is required for SSH credentials")

        required_fields = required_by_type.get(credential_type)
        if required_fields:
            missing = required_fields - set(v.keys())
            if missing:
                raise UnprocessableEntity(
                    f"Missing required fields for {credential_type}: {', '.join(missing)}"
                )

        return v
|
||||||
|
|
||||||
|
|
||||||
|
class CredentialUpdateSchema(BaseSchema):
    """Partial-update payload for a credential.

    Bug fixes versus the original:
    - every field now defaults to None, so an omitted field no longer resets
      the stored value (credential_metadata defaulted to {}, is_default to
      False and is_active to True, silently clobbering data on PATCH);
    - the ftp branch of validate_sensitive_data was truncated (a bare
      ``missing`` statement) and the validator never returned ``v`` — both
      restored;
    - allowed_types now matches CredentialCreateSchema (it was missing
      "geoserver" and "geonetwork").
    """

    name: Optional[str] = Field(None, description="Nama kredensial")
    description: Optional[str] = Field(None, description="Deskripsi kredensial")
    credential_type: Optional[str] = Field(None, description="Tipe kredensial ('database', 'api', 'minio', dll)")
    credential_metadata: Optional[Dict[str, Any]] = Field(
        None, description="Metadata tidak sensitif (tidak dienkripsi)"
    )
    is_default: Optional[bool] = Field(
        None, description="Apakah kredensial ini digunakan sebagai default untuk tipenya"
    )
    is_active: Optional[bool] = Field(None, description="Status aktif kredensial")
    sensitive_data: Optional[Dict[str, Any]] = Field(None, description="Data sensitif yang akan dienkripsi")

    @field_validator("credential_type")
    @classmethod
    def validate_credential_type(cls, v):
        """Restrict credential_type to the supported set (kept in sync with create)."""
        allowed_types = {"database", "api", "minio", "ssh", "smtp", "ftp", "geoserver", "geonetwork"}
        if v not in allowed_types:
            raise UnprocessableEntity(f'credential_type harus salah satu dari: {", ".join(allowed_types)}')
        return v

    @field_validator("sensitive_data")
    @classmethod
    def validate_sensitive_data(cls, v, values):
        """Validasi data sensitif berdasarkan tipe kredensial."""
        if v is None:
            # Field omitted on partial update — nothing to validate.
            return v

        credential_type = values.data.get("credential_type", "")

        # Required keys per type (same table as CredentialCreateSchema).
        required_by_type = {
            "database": {"host", "port", "username", "password", "database_name"},
            "minio": {"endpoint", "access_key", "secret_key", "secure", "bucket_name"},
            "api": {"base_url", "api_key"},
            "ssh": {"host", "port", "username"},
            "smtp": {"host", "port", "username", "password", "use_tls"},
            "ftp": {"host", "port", "username", "password"},
            "server": {"host", "port", "username", "password"},
        }

        # SSH additionally needs at least one authentication secret.
        if credential_type == "ssh" and not ("password" in v or "private_key" in v):
            raise UnprocessableEntity("Either 'password' or 'private_key' is required for SSH credentials")

        required_fields = required_by_type.get(credential_type)
        if required_fields:
            missing = required_fields - set(v.keys())
            if missing:
                raise UnprocessableEntity(
                    f"Missing required fields for {credential_type}: {', '.join(missing)}"
                )

        return v
|
||||||
5
app/schemas/error_schema.py
Normal file
5
app/schemas/error_schema.py
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorResponse(BaseSchema):
    """Uniform error envelope returned by API error handlers."""

    # Human-readable error description.
    message: str
|
||||||
34
app/schemas/feedback_schema.py
Normal file
34
app/schemas/feedback_schema.py
Normal file
|
|
@ -0,0 +1,34 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class FeedbackBase(BaseModel):
    """Shared fields for visitor feedback submitted from the portal."""

    # Satisfaction score (required); range enforcement is left to the service.
    score: int
    tujuan_tercapai: Optional[bool] = True
    tujuan_ditemukan: Optional[bool] = True
    tujuan: Optional[str] = None
    sektor: Optional[str] = None
    email: Optional[str] = None
    saran: Optional[str] = None
    source_url: Optional[str] = None
    source_access: Optional[str] = None
    notes: Optional[str] = None
    gender: Optional[int] = None


class FeedbackCreateSchema(FeedbackBase):
    """Creation payload — identical to the base fields."""

    pass


class FeedbackUpdateSchema(FeedbackBase):
    """Partial-update payload: score becomes optional like everything else."""

    score: Optional[int] = None


class FeedbackSchema(FeedbackBase):
    """Feedback as returned by the API, with server-assigned fields."""

    id: int
    datetime: datetime

    class Config:
        # Allow construction directly from ORM rows.
        from_attributes = True
|
||||||
33
app/schemas/file_schema.py
Normal file
33
app/schemas/file_schema.py
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
from .user_schema import UserSchema
|
||||||
|
|
||||||
|
|
||||||
|
class FileSchema(BaseSchema):
    """Stored file as returned by the API, with its uploader resolved."""

    id: UUID7Field
    # Object key in the object store (e.g. MinIO).
    object_name: str
    uploaded_by: UserSchema
    created_at: datetime
    modified_at: Optional[datetime] = None


class FileCreateSchema(BaseSchema):
    """Metadata supplied when registering an uploaded file."""

    filename: str
    content_type: str
    # Size in bytes.
    size: int
    description: Optional[str] = None
    url: str


class FileUpdateSchema(BaseSchema):
    """Partial-update payload for file metadata; all fields optional."""

    filename: Optional[str] = Field(None, title="File Name")
    content_type: Optional[str] = Field(None, title="Content Type")
    size: Optional[int] = Field(None, title="File Size")
    description: Optional[str] = Field(None, title="File Description")
    url: Optional[str] = Field(None, title="File URL")
|
||||||
30
app/schemas/map_access_schema.py
Normal file
30
app/schemas/map_access_schema.py
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MapAccessSchema(BaseSchema):
    """Per-mapset access grant for either a user or an organization."""

    id: UUID7Field
    mapset_id: UUID7Field
    # Exactly one of user_id / organization_id is expected to be set —
    # presumably enforced elsewhere; not visible here.
    user_id: Optional[UUID7Field] = None
    organization_id: Optional[UUID7Field] = None
    can_read: bool
    can_write: bool
    can_delete: bool


class MapAccessCreateSchema(BaseSchema):
    """Payload for granting access to a mapset."""

    mapset_id: UUID7Field
    user_id: Optional[UUID7Field] = None
    organization_id: Optional[UUID7Field] = None
    can_read: bool
    can_write: bool
    can_delete: bool


class MapAccessUpdateSchema(BaseSchema):
    """Partial-update payload: only the permission flags may change."""

    can_read: Optional[bool] = None
    can_write: Optional[bool] = None
    can_delete: Optional[bool] = None
|
||||||
20
app/schemas/map_projection_system_schema.py
Normal file
20
app/schemas/map_projection_system_schema.py
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MapProjectionSystemSchema(BaseSchema):
    """Map projection system (CRS) reference entry as returned by the API."""

    id: UUID7Field
    name: str


class MapProjectionSystemCreateSchema(BaseSchema):
    """Creation payload — just a bounded name."""

    name: str = Field(..., min_length=1, max_length=50)


class MapProjectionSystemUpdateSchema(BaseSchema):
    """Partial-update payload — the name is optional."""

    name: Optional[str] = Field(None, min_length=1, max_length=50)
|
||||||
38
app/schemas/map_source_schema.py
Normal file
38
app/schemas/map_source_schema.py
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.schemas import CredentialSchema
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MapSourceSchema(BaseSchema):
    """Map source (upstream server/service) as returned by the API."""

    id: UUID7Field
    name: str
    description: Optional[str]
    # Credential used to reach the source, with secrets omitted.
    credential: CredentialSchema
    url: Optional[str]
    is_active: bool
    is_deleted: bool
    created_at: datetime
    updated_at: Optional[datetime]


class MapSourceCreateSchema(BaseSchema):
    """Payload for registering a map source."""

    name: str = Field(..., min_length=1, max_length=50)
    description: Optional[str] = Field(None)
    # BUG FIX: was `url: Optional[str]` with no default, which Pydantic v2
    # treats as a *required* field despite the Optional annotation.
    url: Optional[str] = Field(None)
    credential_id: UUID7Field
    is_active: bool = True


class MapSourceUpdateSchema(BaseSchema):
    """Partial-update payload; every field is optional."""

    name: Optional[str] = Field(None, min_length=1, max_length=50)
    description: Optional[str] = Field(None)
    url: Optional[str] = Field(None)
    # BUG FIX: had no default, making it required on every update request.
    credential_id: Optional[UUID7Field] = Field(None)
    is_active: Optional[bool] = None
    is_deleted: Optional[bool] = None
|
||||||
24
app/schemas/mapset_history_schema.py
Normal file
24
app/schemas/mapset_history_schema.py
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.schemas.user_schema import UserSchema
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetHistorySchema(BaseSchema):
    """One validation-history entry of a mapset, with the acting user resolved."""

    id: UUID7Field
    mapset_id: UUID7Field
    validation_type: str
    notes: Optional[str]
    timestamp: datetime
    # Populated from the ORM attribute "user".
    user_info: UserSchema = Field(alias="user")


class MapsetHistoryCreateSchema(BaseSchema):
    """Payload for appending a validation-history entry."""

    mapset_id: UUID7Field
    validation_type: str = Field(..., min_length=1)
    notes: Optional[str] = None
|
||||||
98
app/schemas/mapset_schema.py
Normal file
98
app/schemas/mapset_schema.py
Normal file
|
|
@ -0,0 +1,98 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.schemas.category_schema import CategorySchema
|
||||||
|
from app.schemas.classification_schema import ClassificationSchema
|
||||||
|
from app.schemas.map_projection_system_schema import MapProjectionSystemSchema
|
||||||
|
from app.schemas.map_source_schema import MapSourceSchema
|
||||||
|
from app.schemas.organization_schema import OrganizationWithMapsetSchema
|
||||||
|
from app.schemas.regional_schema import RegionalSchema
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetSchema(BaseSchema):
    """Mapset as returned by the API, with nested reference objects resolved."""

    id: UUID7Field
    name: str
    description: str
    scale: Optional[str]
    layer_url: Optional[str]
    metadata_url: Optional[str]
    status_validation: Optional[str]
    data_status: str
    data_update_period: Optional[str]
    data_version: Optional[str]
    coverage_level: Optional[str]
    coverage_area: Optional[str]
    layer_type: Optional[str]
    category: CategorySchema
    projection_system: MapProjectionSystemSchema
    producer: OrganizationWithMapsetSchema
    regional: Optional[RegionalSchema]
    sources: Optional[List[MapSourceSchema]] = Field(default_factory=list)
    # BUG FIX: `classification` was declared twice — first as `str`, then as
    # ClassificationSchema. The later declaration silently overrode the first,
    # so only the nested-schema form was ever effective; the dead `str`
    # declaration has been removed.
    classification: ClassificationSchema
    view_count: int
    download_count: int
    is_popular: bool
    is_active: bool
    created_at: datetime
    updated_at: datetime
|
||||||
|
|
||||||
|
|
||||||
|
class MapsetByOrganizationSchema(BaseSchema):
    """Mapsets grouped under one producing organization."""

    id: UUID7Field
    name: str
    # Number of mapsets matched for this organization.
    found: int
    mapsets: List[MapsetSchema]


class MapsetCreateSchema(BaseSchema):
    """Payload for creating a mapset (references are passed by id)."""

    name: str
    description: Optional[str] = Field(None)
    scale: Optional[str] = Field(None)
    layer_url: str
    metadata_url: Optional[str] = Field(None)
    status_validation: str
    layer_type: Optional[str] = Field(None)
    projection_system_id: UUID7Field
    category_id: UUID7Field
    classification_id: UUID7Field
    source_id: Optional[List[UUID7Field]] = Field(None)
    regional_id: UUID7Field
    producer_id: UUID7Field
    data_status: str
    data_update_period: Optional[str] = Field(default=None)
    data_version: Optional[str] = Field(default=None)
    coverage_level: Optional[str] = Field(default=None)
    coverage_area: Optional[str] = Field(default=None)
    is_popular: bool = Field(default=False)
    is_active: bool = Field(default=True)
    notes: Optional[str] = Field(None)


class MapsetUpdateSchema(BaseSchema):
    """Partial-update payload for a mapset; every field is optional."""

    name: Optional[str] = Field(None)
    description: Optional[str] = Field(None)
    scale: Optional[str] = Field(None)
    layer_url: Optional[str] = Field(None)
    metadata_url: Optional[str] = Field(None)
    status_validation: Optional[str] = Field(None)
    layer_type: Optional[str] = Field(None)
    projection_system_id: Optional[UUID7Field] = Field(None)
    category_id: Optional[UUID7Field] = Field(None)
    classification_id: Optional[UUID7Field] = Field(None)
    source_id: Optional[List[UUID7Field]] = Field(None)
    regional_id: Optional[UUID7Field] = Field(None)
    producer_id: Optional[UUID7Field] = Field(None)
    data_status: Optional[str] = Field(None)
    data_update_period: Optional[str] = Field(None)
    data_version: Optional[str] = Field(None)
    coverage_level: Optional[str] = Field(None)
    coverage_area: Optional[str] = Field(None)
    is_popular: Optional[bool] = Field(None)
    is_active: Optional[bool] = Field(None)
    notes: Optional[str] = Field(None)
|
||||||
29
app/schemas/news_schema.py
Normal file
29
app/schemas/news_schema.py
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class NewsSchema(BaseSchema):
    """News/announcement item as returned by the API."""

    id: UUID7Field
    name: str
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: bool = True


class NewsCreateSchema(BaseSchema):
    """Payload for creating a news item."""

    name: str = Field(..., min_length=1)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: bool = True


class NewsUpdateSchema(BaseSchema):
    """Partial-update payload; every field is optional."""

    name: Optional[str] = Field(None, min_length=1)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: Optional[bool] = None
|
||||||
57
app/schemas/organization_schema.py
Normal file
57
app/schemas/organization_schema.py
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import EmailStr, Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class OrganizationSchema(BaseSchema):
    """Organization as returned by the API, including its mapset count."""

    id: UUID7Field
    name: str
    description: Optional[str]
    thumbnail: Optional[str]
    address: Optional[str]
    phone_number: Optional[str]
    email: Optional[EmailStr]
    website: Optional[str]
    # Number of mapsets produced by this organization.
    count_mapset: int = 0
    is_active: bool
    created_at: datetime
    modified_at: Optional[datetime]


class OrganizationWithMapsetSchema(BaseSchema):
    """Slim organization view used when embedded inside mapset responses."""

    id: UUID7Field
    name: str
    description: Optional[str]
    thumbnail: Optional[str]
    address: Optional[str]
    phone_number: Optional[str]
    email: Optional[EmailStr]
    website: Optional[str]


class OrganizationCreateSchema(BaseSchema):
    """Payload for creating an organization."""

    name: str = Field(..., min_length=1, max_length=100)
    description: Optional[str] = Field(None, min_length=1, max_length=500)
    thumbnail: Optional[str] = Field(None, min_length=1, max_length=255)
    address: Optional[str] = Field(None, min_length=1, max_length=255)
    phone_number: Optional[str] = Field(None, min_length=1, max_length=15)
    email: Optional[EmailStr] = Field(None, max_length=100)
    website: Optional[str] = Field(None, min_length=1, max_length=100)
    is_active: Optional[bool] = Field(True)


class OrganizationUpdateSchema(BaseSchema):
    """Partial-update payload; every field is optional.

    NOTE(review): max lengths for thumbnail/address differ from the create
    schema (500 vs 255) — confirm which limit matches the DB columns.
    """

    name: Optional[str] = Field(None, min_length=1, max_length=100)
    description: Optional[str] = Field(None, min_length=1, max_length=500)
    thumbnail: Optional[str] = Field(None, min_length=1, max_length=500)
    phone_number: Optional[str] = Field(None, min_length=1, max_length=15)
    address: Optional[str] = Field(None, min_length=1, max_length=500)
    email: Optional[EmailStr] = Field(None)
    website: Optional[str] = Field(None, min_length=1, max_length=100)
    is_active: Optional[bool] = Field(None)
    is_deleted: Optional[bool] = Field(None)
|
||||||
32
app/schemas/regional_schema.py
Normal file
32
app/schemas/regional_schema.py
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class RegionalSchema(BaseSchema):
    """Administrative region as returned by the API."""

    id: UUID7Field
    code: str
    name: str
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: bool = True


class RegionalCreateSchema(BaseSchema):
    """Payload for creating a region."""

    code: str = Field(..., min_length=1, max_length=10)
    name: str = Field(..., min_length=1, max_length=50)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: bool = True


class RegionalUpdateSchema(BaseSchema):
    """Partial-update payload; every field is optional."""

    code: Optional[str] = Field(None, min_length=1, max_length=10)
    name: Optional[str] = Field(None, min_length=1, max_length=50)
    description: Optional[str] = None
    thumbnail: Optional[str] = None
    is_active: Optional[bool] = None
|
||||||
55
app/schemas/role_schema.py
Normal file
55
app/schemas/role_schema.py
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field, field_validator
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.exceptions import UnprocessableEntity
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class RoleSchema(BaseSchema):
    """Role as returned by the API."""

    id: UUID7Field
    name: str = Field(..., min_length=1, max_length=50)
    description: Optional[str] = Field(None)
    is_active: bool = True


class RoleCreateSchema(BaseSchema):
    """Payload for creating a role; the name must be one of the known roles."""

    name: str = Field(..., min_length=1, max_length=50)
    description: Optional[str] = Field(None)
    is_active: bool = True

    @field_validator("name")
    @classmethod
    def validate_name(cls, value):
        if value is None:
            return value

        valid_role = ["administrator", "data_validator", "data_manager", "data_viewer"]
        if value not in valid_role:
            # BUG FIX: the message used to prepend "administrator, " before
            # joining valid_role (which already contains it), producing
            # "... administrator, administrator, data_validator, ...".
            raise UnprocessableEntity(
                f"Role name must be one of the following: {', '.join(valid_role)}"
            )

        return value


class RoleUpdateSchema(BaseSchema):
    """Partial-update payload for a role."""

    name: Optional[str] = Field(None, min_length=1, max_length=50)
    description: Optional[str] = Field(None)
    is_active: Optional[bool] = None

    @field_validator("name")
    @classmethod
    def validate_name(cls, value):
        if value is None:
            return value

        # NOTE(review): this list uses hyphens ("data-validator") while
        # RoleCreateSchema uses underscores ("data_validator") — a role
        # created via the create endpoint can never be renamed to itself
        # here. One of the two lists is almost certainly wrong; confirm
        # against the role names seeded in the database.
        valid_role = ["administrator", "data-validator", "data-manager", "data-observer"]
        if value not in valid_role:
            # Same message fix as RoleCreateSchema (no duplicated "administrator").
            raise UnprocessableEntity(
                f"Role name must be one of the following: {', '.join(valid_role)}"
            )

        return value
|
||||||
21
app/schemas/token_schema.py
Normal file
21
app/schemas/token_schema.py
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
|
||||||
|
|
||||||
|
class Token(BaseSchema):
    """Token pair issued on successful authentication."""

    access_token: str
    refresh_token: str
    # Unix timestamp (seconds) at which the access token expires.
    expires_at: float
    token_type: str = "bearer"


class TokenPayload(BaseSchema):
    """Decoded JWT claims used internally for validation."""

    # Subject (user identifier).
    sub: Optional[str] = None
    # Expiry claim.
    exp: Optional[datetime] = None
    # Token kind, e.g. access vs refresh.
    type: Optional[str] = None


class RefreshTokenSchema(BaseSchema):
    """Request body for the token-refresh endpoint."""

    refresh_token: str
|
||||||
138
app/schemas/user_schema.py
Normal file
138
app/schemas/user_schema.py
Normal file
|
|
@ -0,0 +1,138 @@
|
||||||
|
import re
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import EmailStr, Field, field_validator
|
||||||
|
|
||||||
|
from app.core.data_types import UUID7Field
|
||||||
|
from app.core.exceptions import UnprocessableEntity
|
||||||
|
|
||||||
|
from .base import BaseSchema
|
||||||
|
from .organization_schema import OrganizationWithMapsetSchema
|
||||||
|
from .role_schema import RoleSchema
|
||||||
|
|
||||||
|
|
||||||
|
class UserSchema(BaseSchema):
    """User as returned by the API, with role and organization resolved."""

    id: UUID7Field
    name: str
    email: EmailStr
    profile_picture: Optional[str] = None
    username: str
    position: Optional[str] = None
    role: RoleSchema | None
    employee_id: Optional[str] = None
    organization: OrganizationWithMapsetSchema
    is_active: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class UserCreateSchema(BaseSchema):
    """Payload for creating a user account.

    Password policy: at least 8 chars with one letter, one digit and one
    special character from ``@$!%*#?&``.
    """

    name: str = Field(..., min_length=4, max_length=100)
    email: EmailStr
    profile_picture: Optional[str] = Field(None)
    # BUG FIX: was `Field(None, ...)` — a required `str` field with a None
    # default, so an omitted username silently became None. Now required.
    username: str = Field(..., min_length=3, max_length=30, pattern=r"^[a-zA-Z0-9_]+$")
    password: str = Field(..., min_length=8, max_length=128)
    position: Optional[str] = Field(None)
    role_id: UUID7Field
    employee_id: Optional[str] = None
    organization_id: UUID7Field
    is_active: bool = True

    @field_validator("password")
    @classmethod
    def validate_password(cls, value):
        """Enforce letter + digit + special-character composition."""
        if value is None:
            return value

        has_letter = any(c.isalpha() for c in value)
        has_digit = any(c.isdigit() for c in value)
        has_special = any(c in "@$!%*#?&" for c in value)

        if not (has_letter and has_digit and has_special):
            raise UnprocessableEntity(
                "Password must be at least 8 characters long and contain at least one letter, one number, and one special character"
            )

        return value

    @field_validator("username")
    @classmethod
    def validate_username(cls, value):
        """Restrict usernames to letters, digits and underscores."""
        if value is None:
            return value

        if not re.match(r"^[a-zA-Z0-9_]+$", value):
            raise UnprocessableEntity("Username can only contain letters, numbers, and underscores")

        return value

    @field_validator("email")
    @classmethod
    def validate_email_domain(cls, value):
        """Hook for e-mail domain allow-listing; currently a pass-through.

        The original body computed ``value.split("@")[1]`` and built a
        ``valid_domains`` list without using either — dead code removed and
        kept as a commented template for when the check is enabled.
        """
        if value is None:
            return value

        # domain = value.split("@")[1]
        # valid_domains = ["gmail.com", "yahoo.com", "hotmail.com", "company.com"]
        # if domain not in valid_domains:
        #     raise UnprocessableEntity(f'Domain email tidak valid. Domain yang diizinkan: {", ".join(valid_domains)}')

        return value
|
||||||
|
|
||||||
|
|
||||||
|
class UserUpdateSchema(BaseSchema):
    """Partial-update payload for a user; all fields optional.

    Password/username rules mirror UserCreateSchema. NOTE(review): the three
    validators are duplicated verbatim from the create schema — consider
    extracting shared helpers.
    """

    name: Optional[str] = Field(None, min_length=2, max_length=100)
    email: Optional[EmailStr] = None
    profile_picture: Optional[str] = Field(None, max_length=255)
    username: Optional[str] = Field(None, min_length=3, max_length=30, pattern=r"^[a-zA-Z0-9_]+$")
    password: Optional[str] = Field(None, min_length=8, max_length=128)
    position: Optional[str] = Field(None)
    role_id: Optional[UUID7Field] = Field(None)
    employee_id: Optional[str] = Field(None, max_length=50)
    organization_id: Optional[UUID7Field] = Field(None)
    is_active: Optional[bool] = None

    @field_validator("password")
    @classmethod
    def validate_password(cls, value):
        """Enforce letter + digit + special-character composition (skip if omitted)."""
        if value is None:
            return value

        has_letter = any(c.isalpha() for c in value)
        has_digit = any(c.isdigit() for c in value)
        has_special = any(c in "@$!%*#?&" for c in value)

        if not (has_letter and has_digit and has_special):
            raise UnprocessableEntity(
                "Password must be at least 8 characters long and contain at least one letter, one number, and one special character"
            )

        return value

    @field_validator("username")
    @classmethod
    def validate_username(cls, value):
        """Restrict usernames to letters, digits and underscores."""
        if value is None:
            return value

        if not re.match(r"^[a-zA-Z0-9_]+$", value):
            raise UnprocessableEntity("Username can only contain letters, numbers, and underscores")

        return value

    @field_validator("email")
    @classmethod
    def validate_email_domain(cls, value):
        """Hook for e-mail domain allow-listing; currently a pass-through.

        Dead code removed: the original computed ``value.split("@")[1]`` and
        an unused ``valid_domains`` list. Kept below as a commented template.
        """
        if value is None:
            return value

        # domain = value.split("@")[1]
        # valid_domains = ["gmail.com", "yahoo.com", "hotmail.com", "company.com"]
        # if domain not in valid_domains:
        #     raise UnprocessableEntity(f'Domain email tidak valid. Domain yang diizinkan: {", ".join(valid_domains)}')

        return value
|
||||||
37
app/services/__init__.py
Normal file
37
app/services/__init__.py
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
"""Aggregate service-layer exports for convenient package-level imports."""

# Imports kept sorted alphabetically by module name. All target modules
# are sibling leaves of this package; no cross-import ordering dependency
# is visible here — confirm if any service module has import-time side
# effects before relying on a particular order.
from .auth_service import AuthService
from .base import BaseService
from .category_service import CategoryService
from .classification_service import ClassificationService
from .count_service import CountService
from .credential_service import CredentialService
from .feedback_service import FeedbackService
from .file_service import FileService
from .map_projection_system_service import MapProjectionSystemService
from .map_source_service import MapSourceService
from .mapset_history_service import MapsetHistoryService
from .mapset_service import MapsetService
from .news_service import NewsService
from .organization_service import OrganizationService
from .regional_service import RegionalService
from .role_service import RoleService
from .user_service import UserService

# Public API of the services package, sorted alphabetically.
__all__ = [
    "AuthService",
    "BaseService",
    "CategoryService",
    "ClassificationService",
    "CountService",
    "CredentialService",
    "FeedbackService",
    "FileService",
    "MapProjectionSystemService",
    "MapSourceService",
    "MapsetHistoryService",
    "MapsetService",
    "NewsService",
    "OrganizationService",
    "RegionalService",
    "RoleService",
    "UserService",
]
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user