diff --git a/.DS_Store b/.DS_Store
index 4ec7214..800103a 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.gitignore b/.gitignore
index 69a4676..7955214 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,5 +18,4 @@ logs/
style_temp/
services/styles/
-
cleansing_func.sql
\ No newline at end of file
diff --git a/api/deps/auth_dependency.py b/api/deps/auth_dependency.py
deleted file mode 100644
index fbb1d0d..0000000
--- a/api/deps/auth_dependency.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from fastapi import Depends, Header
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select
-from datetime import datetime
-from response import errorRes
-
-from database.connection import SessionLocal
-from database.models import User
-
-async def get_db():
- async with SessionLocal() as session:
- yield session
-
-
-async def get_current_user(
- authorization: str = Header(None),
- db: AsyncSession = Depends(get_db)
-):
- if not authorization or not authorization.startswith("Bearer "):
- raise errorRes(status_code=401, message="Missing or invalid token")
-
- token = authorization.split(" ")[1]
- result = await db.execute(select(User).where(User.active_token == token))
- user = result.scalar_one_or_none()
-
- # Case 1: Token not found → maybe replaced by new login
- if not user:
- raise errorRes(status_code=401, message="Token invalid or used by another login")
-
- # Case 2: Token expired
- if user.token_expired_at and user.token_expired_at < datetime.utcnow():
- raise errorRes(status_code=401, message="Token expired")
-
- return user
diff --git a/api/deps/role_dependency.py b/api/deps/role_dependency.py
deleted file mode 100644
index c45069a..0000000
--- a/api/deps/role_dependency.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from fastapi import Depends, status
-from api.deps.auth_dependency import get_current_user
-from response import errorRes
-
-def require_role(required_role: str):
- """
- Return a dependency function that ensures the current user has a specific role.
- Example usage:
- @router.get("/admin", dependencies=[Depends(require_role("admin"))])
- """
- async def role_checker(user = Depends(get_current_user)):
- if user.role != required_role:
- raise errorRes(
- status_code=status.HTTP_403_FORBIDDEN,
- message="Access denied",
- detail=f"Access denied: requires role '{required_role}'",
- )
- return user
-
- return role_checker
diff --git a/api/routers/auth_router.py b/api/routers/auth_router.py
deleted file mode 100644
index 8e8140e..0000000
--- a/api/routers/auth_router.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from fastapi import APIRouter, Depends
-from pydantic import BaseModel
-from sqlalchemy.ext.asyncio import AsyncSession
-from services.auth.login import loginService, get_db
-
-router = APIRouter()
-
-class LoginRequest(BaseModel):
- username: str
- password: str
-
-@router.post("/login")
-async def login(request: LoginRequest, db: AsyncSession = Depends(get_db)):
- return await loginService(request.username, request.password, db)
diff --git a/api/routers/datasets_router.py b/api/routers/datasets_router.py
index 531125d..547d3a9 100644
--- a/api/routers/datasets_router.py
+++ b/api/routers/datasets_router.py
@@ -1,6 +1,7 @@
import asyncio
from uuid import uuid4
from fastapi import APIRouter, HTTPException
+import httpx
import requests
from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError
@@ -10,8 +11,7 @@ from response import successRes, errorRes
from services.datasets.publish_geonetwork import publish_metadata
from services.datasets.publish_geoserver import publish_layer_to_geoserver
from services.datasets.metadata import update_job_status
-from services.upload_file.upload_ws import report_progress
-from core.config import GEOSERVER_URL, GEOSERVER_USER, GEOSERVER_PASS, QGIS_URL
+from core.config import GEOSERVER_URL, GEOSERVER_USER, GEOSERVER_PASS, QGIS_URL, MAIN_API_URL, SERVICE_KEY
router = APIRouter()
@@ -109,17 +109,10 @@ def cleansing_data(table_name: str, job_id: str):
async def job_callback(payload: dict):
table = payload["table"]
job_id = payload["job_id"]
- # await asyncio.sleep(10)
-
- await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
- # await asyncio.sleep(5)
geos_link = publish_layer_to_geoserver(table, job_id)
- await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
- # await asyncio.sleep(3)
uuid = publish_metadata(table_name=table, geoserver_links=geos_link)
- await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
update_job_status(table, "FINISHED", job_id)
return {
@@ -214,60 +207,98 @@ def get_style(style_name: str, workspace: str = None):
# cleansing query
# =============================================================
+# async def query_cleansing_data_fn(
+# table_name: str
+# ):
+# try:
+# async with engine.begin() as conn:
+# await conn.execute(
+# text("SELECT public.fn_cleansing_satupeta_polygon(:table_name)"),
+# {"table_name": table_name}
+# )
+# return "done"
+
+# except SQLAlchemyError as e:
+# raise RuntimeError(f"Fix geometry failed: {str(e)}")
+
async def query_cleansing_data(
table_name: str
):
try:
async with engine.begin() as conn:
await conn.execute(
- text("SELECT public.fn_cleansing_satupeta_polygon(:table_name)"),
+ text("CALL pr_cleansing_satupeta_polygon(:table_name, NULL);"),
{"table_name": table_name}
)
return "done"
except SQLAlchemyError as e:
raise RuntimeError(f"Fix geometry failed: {str(e)}")
-
-async def publish_layer(table_name: str, job_id: str):
- # await asyncio.sleep(10)
-
- await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
- # await asyncio.sleep(5)
- geos_link = publish_layer_to_geoserver(table_name, job_id)
- await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
- # await asyncio.sleep(3)
-
- uuid = publish_metadata(table_name=table_name, geoserver_links=geos_link)
- await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
-
- update_job_status(table_name, "FINISHED", job_id)
- return {
- "ok": True,
- "uuid": uuid
- }
-
async def publish_layer(table_name: str, job_id: str):
- # await asyncio.sleep(10)
try:
- await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
- # await asyncio.sleep(5)
geos_link = publish_layer_to_geoserver(table_name, job_id)
- await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
- # await asyncio.sleep(3)
uuid = publish_metadata(
table_name=table_name,
geoserver_links=geos_link
)
- await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
update_job_status(table_name, "FINISHED", job_id)
- return uuid
+ # return uuid
+ return {
+ "geos_link": geos_link["layer_url"],
+ "uuid": uuid
+ }
except Exception as e:
update_job_status(table_name, "FAILED", job_id)
raise RuntimeError(f"Publish layer gagal: {e}") from e
+
+
+
+
+
+
+
+async def upload_to_main(payload: dict):
+ try:
+ async with httpx.AsyncClient(timeout=10) as client:
+ response = await client.post(
+ MAIN_API_URL+"/api/internal/mapsets",
+ json=payload,
+ headers={
+ "X-SERVICE-KEY": SERVICE_KEY
+ }
+ )
+ response.raise_for_status()
+
+            # request to MAIN_API succeeded (raise_for_status did not raise)
+ return {
+ "status": "success",
+ "data": response.json()
+ }
+
+ except httpx.RequestError as e:
+        # connection-level failures: DNS errors, timeouts, refused connections, etc.
+ raise HTTPException(
+ status_code=500,
+ detail=f"Gagal connect ke MAIN_API: {str(e)}"
+ )
+
+ except httpx.HTTPStatusError as e:
+        # upstream API responded, but with an error status (4xx / 5xx)
+ raise HTTPException(
+ status_code=e.response.status_code,
+ detail=f"MAIN_API error: {e.response.text}"
+ )
+
+ except Exception as e:
+        # fallback for any other unexpected error
+ raise HTTPException(
+ status_code=500,
+ detail=f"Unexpected error: {str(e)}"
+ )
\ No newline at end of file
diff --git a/api/routers/ws/manager.py b/api/routers/ws/manager.py
deleted file mode 100644
index 5833059..0000000
--- a/api/routers/ws/manager.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import Dict, List
-from fastapi import WebSocket
-
-class JobWSManager:
- def __init__(self):
- self.connections: Dict[str, List[WebSocket]] = {}
-
- async def connect(self, job_id: str, ws: WebSocket):
- await ws.accept()
- self.connections.setdefault(job_id, []).append(ws)
-
- def disconnect(self, job_id: str, ws: WebSocket):
- if job_id in self.connections:
- self.connections[job_id].remove(ws)
- if not self.connections[job_id]:
- del self.connections[job_id]
-
- async def send(self, job_id: str, data: dict):
- for ws in self.connections.get(job_id, []):
- await ws.send_json(data)
-
-
-manager = JobWSManager()
diff --git a/api/routers/ws/upload_progress_ws.py b/api/routers/ws/upload_progress_ws.py
deleted file mode 100644
index 746472d..0000000
--- a/api/routers/ws/upload_progress_ws.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from fastapi import APIRouter, WebSocket, WebSocketDisconnect
-from services.upload_file.upload_ws import job_state
-from api.routers.ws.manager import manager
-
-router = APIRouter()
-
-@router.websocket("/ws/test")
-async def ws_test(ws: WebSocket):
- await ws.accept()
- await ws.send_text("OK")
-
-
-@router.websocket("/ws/job/{job_id}")
-async def ws_job(job_id: str, ws: WebSocket):
- await manager.connect(job_id, ws)
-
- # kirim progress terakhir (jika reconnect)
- if job_id in job_state:
- await ws.send_json(job_state[job_id])
-
- try:
- while True:
- await ws.receive_text() # keep alive
- except WebSocketDisconnect:
- manager.disconnect(job_id, ws)
diff --git a/core/config.py b/core/config.py
index 3de9c18..315b3d3 100644
--- a/core/config.py
+++ b/core/config.py
@@ -6,6 +6,9 @@ load_dotenv()
API_VERSION = "2.1.3"
+MAIN_API_URL = os.getenv("MAIN_API_URL")
+SERVICE_KEY = os.getenv("SERVICE_KEY")
+
POSTGIS_URL = os.getenv("POSTGIS_URL")
POSTGIS_SYNC_URL = os.getenv("SYNC_URL")
diff --git a/database/models.py b/database/models.py
deleted file mode 100644
index 27b297d..0000000
--- a/database/models.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from sqlalchemy import Column, Integer, String, Text, ForeignKey, DateTime, TIMESTAMP
-from sqlalchemy.orm import relationship
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.sql import func
-
-Base = declarative_base()
-
-class UploadLog(Base):
- __tablename__ = "upload_logs"
- id = Column(Integer, primary_key=True, index=True)
- filename = Column(String, nullable=False)
- table_name = Column(String, nullable=False)
- file_type = Column(String, nullable=False)
- rows_count = Column(Integer)
- uploaded_at = Column(TIMESTAMP, server_default=func.now())
- status = Column(String)
- message = Column(Text)
-
-
-class Organization(Base):
- __tablename__ = "organizations"
-
- id = Column(Integer, primary_key=True, index=True)
- name = Column(String(100), unique=True, nullable=False)
- address = Column(String(200), nullable=True)
-
- users = relationship("User", back_populates="organization")
-
-
-class User(Base):
- __tablename__ = "users"
-
- id = Column(Integer, primary_key=True, index=True)
- username = Column(String(50), unique=True, nullable=False)
- password_hash = Column(String(255), nullable=False)
- role = Column(String(50), nullable=False, default="user") # <── Added role
- organization_id = Column(Integer, ForeignKey("organizations.id"), nullable=True)
- active_token = Column(String(255), nullable=True)
- token_expired_at = Column(DateTime, nullable=True)
- last_login = Column(DateTime, nullable=True)
-
- organization = relationship("Organization", back_populates="users")
\ No newline at end of file
diff --git a/main.py b/main.py
index 3dbf0f5..8ee7e4d 100644
--- a/main.py
+++ b/main.py
@@ -1,13 +1,9 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from core.config import API_VERSION, ALLOWED_ORIGINS
-from database.connection import engine
-from database.models import Base
from api.routers.system_router import router as system_router
from api.routers.upload_file_router import router as upload_router
-from api.routers.auth_router import router as auth_router
from api.routers.datasets_router import router as dataset_router
-from api.routers.ws.upload_progress_ws import router as ws_router
# from contextlib import asynccontextmanager
# from utils.qgis_init import init_qgis
@@ -57,8 +53,6 @@ app.add_middleware(
# }
# Register routers
-app.include_router(ws_router)
app.include_router(system_router, tags=["System"])
-app.include_router(auth_router, prefix="/auth", tags=["Auth"])
app.include_router(upload_router, prefix="/upload", tags=["Upload"])
app.include_router(dataset_router, prefix="/dataset", tags=["Upload"])
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 72d185a..e670dc1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,35 +1,16 @@
-fastapi
-uvicorn[standard]
-pandas
-numpy
-geopandas
-shapely
-fiona
-pyproj
-SQLAlchemy
-sqlalchemy
-psycopg2-binary
-rapidfuzz
-pdfplumber
-zipfile36
-python-dotenv
-pydantic
-python-multipart
-aiofiles
-starlette
-openpyxl
-requests
-pathlib
-pyarrow
-geoalchemy2
-asyncpg
-py7zr
-bcrypt==4.0.1
-passlib == 1.7.4
-urllib3<2
-osgeo
-websockets
-
-
-# --- jika menggunakan ai ---
-groq
+fastapi==0.128.0
+fiona==1.10.1
+geopandas==1.1.2
+httpx==0.28.1
+numpy==2.4.1
+pandas==3.0.0
+pdfplumber==0.11.9
+py7zr==1.1.2
+pydantic==2.12.5
+pyogrio==0.12.1
+pyproj==3.7.2
+python-dotenv==1.2.1
+rapidfuzz==3.14.3
+Requests==2.32.5
+Shapely==2.1.2
+SQLAlchemy==2.0.46
\ No newline at end of file
diff --git a/services/auth/login.py b/services/auth/login.py
deleted file mode 100644
index 1201545..0000000
--- a/services/auth/login.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select
-from passlib.context import CryptContext
-from uuid import uuid4
-from datetime import datetime, timedelta
-from database.connection import SessionLocal
-from database.models import User
-from response import successRes, errorRes
-
-async def get_db():
- async with SessionLocal() as session:
- yield session
-
-pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-
-async def loginService(username: str, password: str, db: AsyncSession):
- result = await db.execute(select(User).where(User.username == username))
- user = result.scalar_one_or_none()
-
- if not user:
- raise errorRes(status_code=401, message="Invalid username or password")
-
- # Verify password
- if not pwd_context.verify(password, user.password_hash):
- raise errorRes(status_code=401, message="Invalid username or password")
-
- # Validation for organization user
- if user.role != "admin" and not user.organization_id:
- raise errorRes(status_code=403, message="User must belong to an organization")
-
- # Generate single active token
- token = str(uuid4())
- expiry = datetime.utcnow() + timedelta(hours=4)
-
- user.active_token = token
- user.token_expired_at = expiry
- user.last_login = datetime.utcnow()
- await db.commit()
-
- res = {
- "status": "success",
- "username": user.username,
- "role": user.role,
- "organization_id": user.organization_id,
- "token": token,
- "token_expired_at": expiry.isoformat()
- }
-
- return successRes(message="Success Login", data=res)
diff --git a/services/datasets/pub.py b/services/datasets/pub.py
index 06217ea..c58b975 100644
--- a/services/datasets/pub.py
+++ b/services/datasets/pub.py
@@ -93,17 +93,17 @@ def get_author_metadata(table_name: str):
sql = """
SELECT am.table_title, am.dataset_title, am.dataset_abstract, am.keywords, am.date_created,
- am.organization_name, am.contact_person_name, am.created_at,
- am.contact_email, am.contact_phone, am.geom_type,
- u.organization_id,
- o.address AS organization_address,
- o.email AS organization_email,
- o.phone_number AS organization_phone
- FROM backend.author_metadata AS am
- LEFT JOIN backend.users u ON am.user_id = u.id
- LEFT JOIN backend.organizations o ON u.organization_id = o.id
- WHERE am.table_title = :table
- LIMIT 1
+ am.organization_name, am.contact_person_name, am.created_at,
+ am.contact_email, am.contact_phone, am.geom_type,
+ u.organization_id,
+ o.address AS organization_address,
+ o.email AS organization_email,
+ o.phone_number AS organization_phone
+ FROM backend.author_metadata AS am
+ LEFT JOIN backend.users u ON am.user_id = u.id
+ LEFT JOIN backend.organizations o ON u.organization_id = o.id
+ WHERE am.table_title = :table
+ LIMIT 1
"""
conn = engine.connect()
@@ -157,413 +157,413 @@ def generate_metadata_xml(table_name, meta, extent, geoserver_links):
return f"""
-
- {uuid}
-
-
-
-
-
-
-
-
-
-
-
-
-
- {meta['contact_person_name']}
-
-
- {meta['organization_name']}
-
-
-
-
-
-
- {meta['organization_phone']}
-
-
- {meta['organization_phone']}
-
-
-
-
-
-
- {meta['organization_address']}
-
-
- Surabaya
-
-
- Jawa Timur
-
-
- Indonesia
-
-
- {meta['organization_email']}
-
-
-
-
- 08.00-16.00
-
-
-
-
-
-
-
-
-
- {datetime.utcnow().isoformat()}+07:00
-
-
- ISO 19115:2003/19139
-
-
- 1.0
-
-
-
-
-
-
-
-
-
- 38
-
-
-
-
-
-
-
-
-
-
- 4326
-
-
- EPSG
-
-
-
-
-
-
-
-
-
-
- {meta['dataset_title']}
-
-
-
-
- {meta['created_at'].isoformat()}+07:00
-
-
-
-
-
-
-
- {meta['date_created'].year}
-
-
-
-
- {meta['contact_person_name']}
-
-
- {meta['organization_name']}
-
-
-
-
-
-
- {meta['organization_phone']}
-
-
- {meta['organization_phone']}
-
-
-
-
-
-
- {meta['organization_address']}
-
-
- Surabaya
-
-
- Indonesia
-
-
- {meta['organization_email']}
-
-
-
-
- 08.00-16.00
-
-
-
-
-
-
-
-
-
- Timezone: UTC+7 (Asia/Jakarta)
-
-
-
-
- {meta['dataset_abstract']}
-
-
- {meta['dataset_abstract']}
-
-
-
-
-
-
-
- Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
-
-
- Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
-
-
-
-
-
-
-
- {meta['organization_phone']}
-
-
- {meta['organization_phone']}
-
-
-
-
-
-
- {meta['organization_address']}
-
-
- Surabaya
-
-
- Jawa Timur
-
-
- Indonesia
-
-
- {meta['organization_email']}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {keywords_xml}
-
-
-
-
-
-
-
-
-
-
-
- Penggunaan data harus mencantumkan sumber: {meta['organization_name']}.
-
-
-
-
-
-
-
-
-
-
-
- 25000
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {extent['xmin']}
- {extent['xmax']}
- {extent['ymin']}
- {extent['ymax']}
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
-
- {meta['dataset_title']}
-
-
-
-
- {meta['created_at'].isoformat()}+07:00
-
-
-
-
-
-
-
- {meta['date_created'].year}
-
-
-
-
-
-
-
-
-
-
-
-
- {geoserver_links["wms_url"]}
-
-
- DB:POSTGIS
+ xmlns:gmd="http://www.isotc211.org/2005/gmd"
+ xmlns:gco="http://www.isotc211.org/2005/gco"
+ xmlns:srv="http://www.isotc211.org/2005/srv"
+ xmlns:gmx="http://www.isotc211.org/2005/gmx"
+ xmlns:gts="http://www.isotc211.org/2005/gts"
+ xmlns:gsr="http://www.isotc211.org/2005/gsr"
+ xmlns:gmi="http://www.isotc211.org/2005/gmi"
+ xmlns:gml="http://www.opengis.net/gml/3.2"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.isotc211.org/2005/gmd http://schemas.opengis.net/csw/2.0.2/profiles/apiso/1.0.0/apiso.xsd">
+
+ {uuid}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {meta['contact_person_name']}
+
+
+ {meta['organization_name']}
+
+
+
+
+
+
+ {meta['organization_phone']}
+
+
+ {meta['organization_phone']}
+
+
+
+
+
+
+ {meta['organization_address']}
+
+
+ Surabaya
+
+
+ Jawa Timur
+
+
+ Indonesia
+
+
+ {meta['organization_email']}
+
+
+
+
+ 08.00-16.00
+
+
+
+
+
+
+
+
+
+ {datetime.utcnow().isoformat()}+07:00
+
+
+ ISO 19115:2003/19139
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+ 38
+
+
+
+
+
+
+
+
+
+
+ 4326
+
+
+ EPSG
+
+
+
+
+
+
+
+
+
+
+ {meta['dataset_title']}
+
+
+
+
+ {meta['created_at'].isoformat()}+07:00
+
+
+
+
+
+
+
+ {meta['date_created'].year}
+
+
+
+
+ {meta['contact_person_name']}
+
+
+ {meta['organization_name']}
+
+
+
+
+
+
+ {meta['organization_phone']}
+
+
+ {meta['organization_phone']}
+
+
+
+
+
+
+ {meta['organization_address']}
+
+
+ Surabaya
+
+
+ Indonesia
+
+
+ {meta['organization_email']}
+
+
+
+
+ 08.00-16.00
+
+
+
+
+
+
+
+
+
+ Timezone: UTC+7 (Asia/Jakarta)
+
+
+
+
+ {meta['dataset_abstract']}
+
+
+ {meta['dataset_abstract']}
+
+
+
+
+
+
+
+ Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
+
+
+ Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
+
+
+
+
+
+
+
+ {meta['organization_phone']}
+
+
+ {meta['organization_phone']}
+
+
+
+
+
+
+ {meta['organization_address']}
+
+
+ Surabaya
+
+
+ Jawa Timur
+
+
+ Indonesia
+
+
+ {meta['organization_email']}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {keywords_xml}
+
+
+
+
+
+
+
+
+
+
+
+ Penggunaan data harus mencantumkan sumber: {meta['organization_name']}.
+
+
+
+
+
+
+
+
+
+
+
+ 25000
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {extent['xmin']}
+ {extent['xmax']}
+ {extent['ymin']}
+ {extent['ymax']}
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+ {meta['dataset_title']}
+
+
+
+
+ {meta['created_at'].isoformat()}+07:00
+
+
+
+
+
+
+
+ {meta['date_created'].year}
+
+
+
+
+
+
+
+
+
+
+
+
+ {geoserver_links["wms_url"]}
+
+
+ DB:POSTGIS
-
- {meta["dataset_title"]}
-
-
- {meta["dataset_title"]}
-
-
-
-
-
-
- {geoserver_links["wms_url"]}
-
-
- WWW:LINK-1.0-http--link
-
-
- {meta["dataset_title"]}
-
-
- {meta["dataset_title"]}
-
-
-
-
-
-
- {geoserver_links["wms_url"]}
-
-
- OGC:WMS
-
-
- {meta["dataset_title"]}
-
-
-
+
+ {meta["dataset_title"]}
+
+
+ {meta["dataset_title"]}
+
+
+
+
+
+
+ {geoserver_links["wms_url"]}
+
+
+ WWW:LINK-1.0-http--link
+
+
+ {meta["dataset_title"]}
+
+
+ {meta["dataset_title"]}
+
+
+
+
+
+
+ {geoserver_links["wms_url"]}
+
+
+ OGC:WMS
+
+
+ {meta["dataset_title"]}
+
+
+
-
-
-
- {geoserver_links["wfs_url"]}
-
-
- OGC:WFS
-
-
- {meta["dataset_title"]}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Data dihasilkan dari digitasi peta dasar skala 1:25000 menggunakan QGIS.
-
-
-
-
-
+
+
+
+ {geoserver_links["wfs_url"]}
+
+
+ OGC:WFS
+
+
+ {meta["dataset_title"]}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Data dihasilkan dari digitasi peta dasar skala 1:25000 menggunakan QGIS.
+
+
+
+
+
"""
@@ -584,7 +584,7 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
# if not xsrf_token:
# raise HTTPException(status_code=500, detail="Could not retrieve XSRF-TOKEN from GeoNetwork.")
- session, xsrf_token = create_gn_session()
+ session, xsrf_token = create_gn_session()
headers = {
'X-XSRF-TOKEN': xsrf_token,
@@ -599,9 +599,9 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
}
params = {
- "ownerGroup": 1, # all
- "ownerUser": 1 # admin
- }
+ "ownerGroup": 1, # all
+ "ownerUser": 1 # admin
+ }
response = session.post(
GN_API_RECORDS_URL,
@@ -623,7 +623,7 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
record = publish_record(session, uuid)
print('[record]', record)
- # print("response", response.json())
+ # print("response", response.json())
return response.json()
diff --git a/services/datasets/publish_geonetwork.py b/services/datasets/publish_geonetwork.py
index decca62..7b781b5 100644
--- a/services/datasets/publish_geonetwork.py
+++ b/services/datasets/publish_geonetwork.py
@@ -348,10 +348,10 @@ def generate_metadata_xml(table_name, meta, extent, geoserver_links):
- Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
+ Lab AI Polinema
- Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur
+ Lab AI Polinema
diff --git a/services/datasets/publish_geoserver.py b/services/datasets/publish_geoserver.py
index 833d8f3..3058c42 100644
--- a/services/datasets/publish_geoserver.py
+++ b/services/datasets/publish_geoserver.py
@@ -150,14 +150,29 @@ def publish_layer_to_geoserver(table: str, job_id: str):
f"{GEOSERVER_URL}/{GEOSERVER_WORKSPACE}/wfs?"
f"service=WFS&request=GetFeature&typeName={GEOSERVER_WORKSPACE}:{table}"
)
- print(f"[GeoServer] WMS URL: {wms_link}")
- print(f"[GeoServer] WFS URL: {wfs_link}")
- print(f"[GeoServer] Reload completed. Layer {table} ready.")
+ # print(f"[GeoServer] WMS URL: {wms_link}")
+ # print(f"[GeoServer] WFS URL: {wfs_link}")
+ # print(f"[GeoServer] Reload completed. Layer {table} ready.")
+ openlayer_url = (
+ f"{GEOSERVER_URL}/{GEOSERVER_WORKSPACE}/wms?"
+ f"service=WMS"
+ f"&version=1.1.0"
+ f"&request=GetMap"
+ f"&layers={GEOSERVER_WORKSPACE}:{table}"
+ f"&styles="
+ f"&bbox=110.89528623700005%2C-8.780412043999945%2C116.26994997700001%2C-5.042971664999925"
+ f"&width=768"
+ f"&height=384"
+ f"&srs=EPSG:4326"
+ f"&format=application/openlayers"
+ )
+
return {
"table": table,
"style": style_name,
"wms_url": wms_link,
- "wfs_url": wfs_link
+ "wfs_url": wfs_link,
+ "layer_url": openlayer_url
}
diff --git a/services/upload_file/upload.py b/services/upload_file/upload.py
index 1729425..cbc8fe5 100644
--- a/services/upload_file/upload.py
+++ b/services/upload_file/upload.py
@@ -11,8 +11,8 @@ from pyproj import CRS
from shapely.geometry.base import BaseGeometry
from shapely.geometry import base as shapely_base
from fastapi import Depends, File, Form, UploadFile, HTTPException
-from api.routers.datasets_router import cleansing_data, publish_layer, query_cleansing_data
-from core.config import UPLOAD_FOLDER, MAX_FILE_MB, VALID_WKT_PREFIXES
+from api.routers.datasets_router import cleansing_data, publish_layer, query_cleansing_data, upload_to_main
+from core.config import UPLOAD_FOLDER, MAX_FILE_MB, VALID_WKT_PREFIXES, GEONETWORK_URL
from services.upload_file.ai_generate import send_metadata
from services.upload_file.readers.reader_csv import read_csv
from services.upload_file.readers.reader_shp import read_shp
@@ -20,9 +20,7 @@ from services.upload_file.readers.reader_gdb import read_gdb
from services.upload_file.readers.reader_mpk import read_mpk
from services.upload_file.readers.reader_pdf import convert_df, read_pdf
from services.upload_file.utils.geometry_detector import detect_and_build_geometry, attach_polygon_geometry_auto
-from services.upload_file.upload_ws import report_progress
from database.connection import engine, sync_engine
-from database.models import Base
from pydantic import BaseModel
from typing import Any, Dict, List, Optional
from shapely import MultiLineString, MultiPolygon, wkt
@@ -306,8 +304,8 @@ def process_data(df: pd.DataFrame, ext: str, filename: str, fileDesc: str):
# "kategori_mapset": ""
# }
}
- # ai_suggest = send_metadata(ai_context)
- ai_suggest = {'judul': 'Peta Risiko Letusan Gunung Arjuna di Provinsi Jawa Timur', 'abstrak': 'Peta ini menggambarkan wilayah berisiko letusan Gunung Arjuna yang berada di Provinsi Jawa Timur. Data disajikan dalam bentuk poligon yang menunjukkan zona risiko berdasarkan analisis potensi aktivitas vulkanik.', 'tujuan': 'Data dapat digunakan untuk perencanaan mitigasi bencana dan pengambilan keputusan di wilayah Jawa Timur.', 'keyword': ['Risiko letusan', 'Gunung Arjuna', 'Bencana alam', 'Provinsi Jawa Timur', 'Geologi'], 'kategori': ['Geoscientific information', 'Environment'], 'kategori_mapset': 'Lingkungan Hidup'}
+ ai_suggest = send_metadata(ai_context)
+ # ai_suggest = {'judul': 'Peta Risiko Letusan Gunung Arjuna di Provinsi Jawa Timur', 'abstrak': 'Peta ini menggambarkan wilayah berisiko letusan Gunung Arjuna yang berada di Provinsi Jawa Timur. Data disajikan dalam bentuk poligon yang menunjukkan zona risiko berdasarkan analisis potensi aktivitas vulkanik.', 'tujuan': 'Data dapat digunakan untuk perencanaan mitigasi bencana dan pengambilan keputusan di wilayah Jawa Timur.', 'keyword': ['Risiko letusan', 'Gunung Arjuna', 'Bencana alam', 'Provinsi Jawa Timur', 'Geologi'], 'kategori': ['Geoscientific information', 'Environment'], 'kategori_mapset': 'Lingkungan Hidup'}
# print(ai_suggest)
response = {
@@ -509,223 +507,279 @@ def save_xml_to_sld(xml_string, filename):
-# async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
-# try:
-# table_name = await generate_unique_table_name(payload.title)
-# # DataFrame
-# df = pd.DataFrame(payload.rows)
-# df.columns = [col.upper() for col in df.columns]
-# if "GEOMETRY" not in df.columns:
-# raise HTTPException(400, "Kolom GEOMETRY tidak ditemukan")
-
-# # =====================================================================
-# # 1. LOAD WKT → SHAPELY
-# # =====================================================================
-# def safe_load_wkt(g):
-# if not isinstance(g, str):
-# return None
-# try:
-# geom = wkt.loads(g)
-# return geom
-# except:
-# return None
-
-# df["GEOMETRY"] = df["GEOMETRY"].apply(safe_load_wkt)
-# df = df.rename(columns={"GEOMETRY": "geom"})
-
-# # =====================================================================
-# # 2. DROP ROW geometry NULL
-# # =====================================================================
-# df = df[df["geom"].notnull()]
-# if df.empty:
-# raise HTTPException(400, "Semua geometry invalid atau NULL")
-
-# # =====================================================================
-# # 3. VALIDATE geometry (very important)
-# # =====================================================================
-# df["geom"] = df["geom"].apply(lambda g: g if g.is_valid else g.buffer(0))
-
-# # =====================================================================
-# # 4. SERAGAMKAN TIPE GEOMETRY (Polygon→MultiPolygon, Line→MultiLine)
-# # =====================================================================
-# def unify_geometry_type(g):
-# gtype = g.geom_type.upper()
-# if gtype == "POLYGON":
-# return MultiPolygon([g])
-# if gtype == "LINESTRING":
-# return MultiLineString([g])
-# return g # sudah MULTI atau POINT
-# df["geom"] = df["geom"].apply(unify_geometry_type)
-
-# # =====================================================================
-# # 5. DETEKSI CRS DARI METADATA / INPUT / DEFAULT
-# # =====================================================================
-# detected_crs = payload.author.get("crs")
-
-# detected = payload.author.get("crs")
-# print('crs', detected)
-
-# if not detected_crs:
-# detected_crs = "EPSG:4326"
-
-# detected_crs = 'EPSG:4326'
-# # Buat GeoDataFrame
-# gdf = gpd.GeoDataFrame(df, geometry="geom", crs=detected_crs)
-# row_count = len(gdf)
-
-# # =====================================================================
-# # 6. VERIFY CRS (SRID) VALID di PROJ / PostGIS
-# # =====================================================================
-# try:
-# _ = gdf.to_crs(gdf.crs) # test CRS valid
-# except:
-# raise HTTPException(400, f"CRS {detected_crs} tidak valid")
-
-# # =====================================================================
-# # 7. SIMPAN KE POSTGIS (synchronous)
-# # =====================================================================
-# loop = asyncio.get_running_loop()
-# await loop.run_in_executor(
-# None,
-# lambda: gdf.to_postgis(
-# table_name,
-# sync_engine,
-# if_exists="replace",
-# index=False
-# )
-# )
-
-# # =====================================================================
-# # 8. ADD PRIMARY KEY (wajib untuk QGIS API)
-# # =====================================================================
-# async with engine.begin() as conn:
-# await conn.execute(text(
-# f'ALTER TABLE "{table_name}" ADD COLUMN _ID SERIAL PRIMARY KEY;'
-# ))
-
-# # =====================================================================
-# # 9. SIMPAN METADATA (geom_type, author metadata)
-# # =====================================================================
-# unified_geom_type = list(gdf.geom_type.unique())
-# author = payload.author
-# async with engine.begin() as conn:
-# await conn.execute(text("""
-# INSERT INTO backend.author_metadata (
-# table_title,
-# dataset_title,
-# dataset_abstract,
-# keywords,
-# topic_category,
-# date_created,
-# dataset_status,
-# organization_name,
-# contact_person_name,
-# contact_email,
-# contact_phone,
-# geom_type,
-# user_id,
-# process,
-# geometry_count
-# ) VALUES (
-# :table_title,
-# :dataset_title,
-# :dataset_abstract,
-# :keywords,
-# :topic_category,
-# :date_created,
-# :dataset_status,
-# :organization_name,
-# :contact_person_name,
-# :contact_email,
-# :contact_phone,
-# :geom_type,
-# :user_id,
-# :process,
-# :geometry_count
-# )
-# """), {
-# "table_title": table_name,
-# "dataset_title": payload.title,
-# "dataset_abstract": author.get("abstract"),
-# "keywords": author.get("keywords"),
-# # "topic_category": author.get("topicCategory"),
-# "topic_category": ", ".join(author.get("topicCategory")),
-# "date_created": str_to_date(author.get("dateCreated")),
-# "dataset_status": author.get("status"),
-# "organization_name": author.get("organization"),
-# "contact_person_name": author.get("contactName"),
-# "contact_email": author.get("contactEmail"),
-# "contact_phone": author.get("contactPhone"),
-# "geom_type": json.dumps(unified_geom_type),
-# "user_id": user_id,
-# "process": 'CLEANSING',
-# "geometry_count": row_count
-# })
-
-
-# # =====================================================================
-# # 10. LOGGING
-# # =====================================================================
-# await log_activity(
-# user_id=user_id,
-# action_type="UPLOAD",
-# action_title=f"Upload dataset {table_name}",
-# details={"table_name": table_name, "rows": len(gdf)}
-# )
-
-# job_id = generate_job_id(str(user_id))
-# result = {
-# "job_id": job_id,
-# "job_status": "wait",
-# "table_name": table_name,
-# "status": "success",
-# "message": f"Tabel '{table_name}' berhasil dibuat.",
-# "total_rows": len(gdf),
-# "geometry_type": unified_geom_type,
-# "crs": detected_crs,
-# "metadata_uuid": ""
-# }
-# save_xml_to_sld(payload.style, job_id)
-
-# await report_progress(job_id, "upload", 20, "Upload selesai")
-# # cleansing_data(table_name, job_id)
-
-# cleansing = await query_cleansing_data(table_name)
-# result['job_status'] = cleansing
-
-# uuid = await publish_layer(table_name, job_id)
-# result['metadata_uuid'] = uuid
-
-# return successRes(data=result)
-
-# except Exception as e:
-# await log_activity(
-# user_id=user_id,
-# action_type="ERROR",
-# action_title="Upload gagal",
-# details={"error": str(e)}
-# )
-# raise HTTPException(status_code=500, detail=str(e))
-
async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
try:
+ table_name = await generate_unique_table_name(payload.title)
+ # DataFrame
+ df = pd.DataFrame(payload.rows)
+ df.columns = [col.upper() for col in df.columns]
+ if "GEOMETRY" not in df.columns:
+ raise HTTPException(400, "Kolom GEOMETRY tidak ditemukan")
+
+ # =====================================================================
+ # 1. LOAD WKT → SHAPELY
+ # =====================================================================
+ def safe_load_wkt(g):
+ if not isinstance(g, str):
+ return None
+ try:
+ geom = wkt.loads(g)
+ return geom
+ except:
+ return None
+
+ df["GEOMETRY"] = df["GEOMETRY"].apply(safe_load_wkt)
+ df = df.rename(columns={"GEOMETRY": "geom"})
+
+ # =====================================================================
+ # 2. DROP ROW geometry NULL
+ # =====================================================================
+ df = df[df["geom"].notnull()]
+ if df.empty:
+ raise HTTPException(400, "Semua geometry invalid atau NULL")
+
+ # =====================================================================
+ # 3. VALIDATE geometry (very important)
+ # =====================================================================
+ df["geom"] = df["geom"].apply(lambda g: g if g.is_valid else g.buffer(0))
+
+ # =====================================================================
+ # 4. SERAGAMKAN TIPE GEOMETRY (Polygon→MultiPolygon, Line→MultiLine)
+ # =====================================================================
+ def unify_geometry_type(g):
+ gtype = g.geom_type.upper()
+ if gtype == "POLYGON":
+ return MultiPolygon([g])
+ if gtype == "LINESTRING":
+ return MultiLineString([g])
+ return g # sudah MULTI atau POINT
+ df["geom"] = df["geom"].apply(unify_geometry_type)
+
+ # =====================================================================
+ # 5. DETEKSI CRS DARI METADATA / INPUT / DEFAULT
+ # =====================================================================
+ detected_crs = payload.author.get("crs")
+
+ detected = payload.author.get("crs")
+ print('crs', detected)
+
+ if not detected_crs:
+ detected_crs = "EPSG:4326"
+
+ detected_crs = 'EPSG:4326'
+ # Buat GeoDataFrame
+ gdf = gpd.GeoDataFrame(df, geometry="geom", crs=detected_crs)
+ row_count = len(gdf)
+
+ # =====================================================================
+ # 6. VERIFY CRS (SRID) VALID di PROJ / PostGIS
+ # =====================================================================
+ try:
+ _ = gdf.to_crs(gdf.crs) # test CRS valid
+ except:
+ raise HTTPException(400, f"CRS {detected_crs} tidak valid")
+
+ # =====================================================================
+ # 7. SIMPAN KE POSTGIS (synchronous)
+ # =====================================================================
+ loop = asyncio.get_running_loop()
+ await loop.run_in_executor(
+ None,
+ lambda: gdf.to_postgis(
+ table_name,
+ sync_engine,
+ if_exists="replace",
+ index=False
+ )
+ )
+
+ # =====================================================================
+ # 8. ADD PRIMARY KEY (wajib untuk QGIS API)
+ # =====================================================================
+ async with engine.begin() as conn:
+ await conn.execute(text(
+ f'ALTER TABLE "{table_name}" ADD COLUMN _ID SERIAL PRIMARY KEY;'
+ ))
+
+ # =====================================================================
+ # 9. SIMPAN METADATA (geom_type, author metadata)
+ # =====================================================================
+ unified_geom_type = list(gdf.geom_type.unique())
+ author = payload.author
+ async with engine.begin() as conn:
+ await conn.execute(text("""
+ INSERT INTO backend.author_metadata (
+ table_title,
+ dataset_title,
+ dataset_abstract,
+ keywords,
+ topic_category,
+ date_created,
+ dataset_status,
+ organization_name,
+ contact_person_name,
+ contact_email,
+ contact_phone,
+ geom_type,
+ user_id,
+ process,
+ geometry_count
+ ) VALUES (
+ :table_title,
+ :dataset_title,
+ :dataset_abstract,
+ :keywords,
+ :topic_category,
+ :date_created,
+ :dataset_status,
+ :organization_name,
+ :contact_person_name,
+ :contact_email,
+ :contact_phone,
+ :geom_type,
+ :user_id,
+ :process,
+ :geometry_count
+ )
+ """), {
+ "table_title": table_name,
+ "dataset_title": payload.title,
+ "dataset_abstract": author.get("abstract"),
+ "keywords": author.get("keywords"),
+ # "topic_category": author.get("topicCategory"),
+ "topic_category": ", ".join(author.get("topicCategory")),
+ "date_created": str_to_date(author.get("dateCreated")),
+ "dataset_status": author.get("status"),
+ "organization_name": author.get("organization"),
+ "contact_person_name": author.get("contactName"),
+ "contact_email": author.get("contactEmail"),
+ "contact_phone": author.get("contactPhone"),
+ "geom_type": json.dumps(unified_geom_type),
+ "user_id": user_id,
+ "process": 'CLEANSING',
+ "geometry_count": row_count
+ })
+
+
+ # =====================================================================
+ # 10. LOGGING
+ # =====================================================================
+ await log_activity(
+ user_id=user_id,
+ action_type="UPLOAD",
+ action_title=f"Upload dataset {table_name}",
+ details={"table_name": table_name, "rows": len(gdf)}
+ )
+
job_id = generate_job_id(str(user_id))
result = {
"job_id": job_id,
- "job_status": "done",
- "table_name": "just for test",
+ "job_status": "wait",
+ "table_name": table_name,
"status": "success",
- "message": f"Tabel test berhasil dibuat.",
- "total_rows": 10,
- "geometry_type": "Polygon",
- "crs": "EPSG 4326",
- "metadata_uuid": "-"
+ "message": f"Tabel '{table_name}' berhasil dibuat.",
+ "total_rows": len(gdf),
+ "geometry_type": unified_geom_type,
+ "crs": detected_crs,
+ "metadata_uuid": ""
}
+ save_xml_to_sld(payload.style, job_id)
+
+ # await report_progress(job_id, "upload", 20, "Upload selesai")
+ # cleansing_data(table_name, job_id)
+
+ cleansing = await query_cleansing_data(table_name)
+ result['job_status'] = cleansing
+
+ publish = await publish_layer(table_name, job_id)
+ result['metadata_uuid'] = publish['uuid']
+
+ mapset = {
+ "name": payload.title,
+ "description": author.get("abstract"),
+ "scale": "1:25000",
+ "projection_system_id": "0196c746-d1ba-7f1c-9706-5df738679cc7",
+ "category_id": author.get("mapsetCategory"),
+ "data_status": "sementara",
+ "classification_id": "01968b4b-d3f9-76c9-888c-ee887ac31ce4",
+ "producer_id": "019bd4ea-eb33-704e-83c3-8253d457b187",
+ "layer_type": unified_geom_type[0],
+ "source_id": ["019bd4e7-3df8-75c8-9b89-3f310967649c"],
+ "layer_url": publish['geos_link'],
+ "metadata_url": f"{GEONETWORK_URL}/srv/eng/catalog.search#/metadata/{publish['uuid']}",
+ "coverage_level": "provinsi",
+ "coverage_area": "kabupaten",
+ "data_update_period": "Tahunan",
+ "data_version": "2026",
+ "is_popular": False,
+ "is_active": True,
+ "regional_id": "01968b53-a910-7a67-bd10-975b8923b92e",
+ "notes": "Mapset baru dibuat",
+ "status_validation": "on_verification",
+ }
+
+ print("mapset data",mapset)
+ await upload_to_main(mapset)
return successRes(data=result)
except Exception as e:
- print("gtw")
+ await log_activity(
+ user_id=user_id,
+ action_type="ERROR",
+ action_title="Upload gagal",
+ details={"error": str(e)}
+ )
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+
+# async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
+# try:
+# job_id = generate_job_id(str(user_id))
+# result = {
+# "job_id": job_id,
+# "job_status": "done",
+# "table_name": "just for test",
+# "status": "success",
+# "message": f"Tabel test berhasil dibuat.",
+# "total_rows": 10,
+# "geometry_type": "Polygon",
+# "crs": "EPSG 4326",
+# "metadata_uuid": "-"
+# }
+
+# mapset = {
+# "name": "Resiko Letusan Gunung Arjuno",
+# "description": "Testing Automation Upload",
+# "scale": "1:25000",
+# "projection_system_id": "0196c746-d1ba-7f1c-9706-5df738679cc7",
+# "category_id": "0196c80c-855f-77f9-abd0-0c8a30b8c2f5",
+# "data_status": "sementara",
+# "classification_id": "01968b4b-d3f9-76c9-888c-ee887ac31ce4",
+# "producer_id": "019bd4ea-eb33-704e-83c3-8253d457b187",
+# "layer_type": "polygon",
+# "source_id": ["019bd4e7-3df8-75c8-9b89-3f310967649c"],
+# "layer_url": "http://192.168.60.24:8888/geoserver/wms?service=WMS&version=1.1.0&request=GetMap&layers=labai:risiko_letusan_gunung_arjuno_bromo&bbox=110.89528623700005,-8.780412043999945,116.26994997700001,-5.042971664999925&width=768&height=534&srs=EPSG:4326&styles=&format=application/openlayers",
+# "metadata_url": "http://192.168.60.24:7777/geonetwork/srv/eng/catalog.search#/metadata/9e5e2f09-13ef-49b5-bb49-1cb12136f63b",
+# "coverage_level": "provinsi",
+# "coverage_area": "kabupaten",
+# "data_update_period": "Tahunan",
+# "data_version": "2026",
+# "is_popular": False,
+# "is_active": True,
+# "regional_id": "01968b53-a910-7a67-bd10-975b8923b92e",
+# "notes": "Mapset baru dibuat",
+# "status_validation": "on_verification",
+# }
+
+# await upload_to_main(mapset)
+
+# return successRes(data=result)
+
+# except Exception as e:
+# print("errot", e)
diff --git a/services/upload_file/upload_ws.py b/services/upload_file/upload_ws.py
deleted file mode 100644
index 4c3727e..0000000
--- a/services/upload_file/upload_ws.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# app/jobs/progress.py
-from typing import Dict
-from api.routers.ws.manager import manager
-
-# state job (in-memory dulu)
-job_state: Dict[str, dict] = {}
-
-
-async def report_progress(
- job_id: str,
- step: str,
- progress: int,
- message: str
-):
- """
- Fungsi tunggal untuk update & broadcast progress job
- """
-
- job_state[job_id] = {
- "job_id": job_id,
- "step": step,
- "progress": progress,
- "message": message,
- }
-
- # push ke websocket
- await manager.send(job_id, job_state[job_id])
diff --git a/utils/qgis_init.py b/utils/qgis_init.py
deleted file mode 100644
index 9c27cb4..0000000
--- a/utils/qgis_init.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# utils/qgis_init.py
-import os
-import sys
-
-# Lokasi instalasi QGIS di Linux (Ubuntu / Debian)
-QGIS_PREFIX = "/usr"
-
-# Path modul Python QGIS
-sys.path.append("/usr/share/qgis/python")
-
-# Environment variable agar QGIS dapat berjalan headless (tanpa GUI)
-os.environ["QGIS_PREFIX_PATH"] = QGIS_PREFIX
-os.environ["QT_QPA_PLATFORM"] = "offscreen"
-
-from qgis.core import QgsApplication
-from qgis.analysis import QgsNativeAlgorithms
-import processing
-from processing.core.Processing import Processing
-
-
-def init_qgis():
- qgs = QgsApplication([], False)
- qgs.initQgis()
-
- # Register QGIS processing provider
- Processing.initialize()
- QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
-
- print("QGIS initialized successfully")
- return qgs