update structure

This commit is contained in:
DmsAnhr 2026-01-29 10:16:34 +07:00
parent 236b6f1241
commit ead61323a2
19 changed files with 794 additions and 981 deletions

BIN
.DS_Store vendored

Binary file not shown.

1
.gitignore vendored
View File

@ -18,5 +18,4 @@ logs/
style_temp/ style_temp/
services/styles/ services/styles/
cleansing_func.sql cleansing_func.sql

View File

@ -1,34 +0,0 @@
from fastapi import Depends, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from datetime import datetime
from response import errorRes
from database.connection import SessionLocal
from database.models import User
async def get_db():
    """FastAPI dependency: yield an AsyncSession scoped to a single request."""
    async with SessionLocal() as session:
        yield session
async def get_current_user(
    authorization: str = Header(None),
    db: AsyncSession = Depends(get_db)
):
    """Resolve the current user from a 'Bearer <token>' Authorization header.

    Raises a 401 via errorRes when the header is absent/malformed, when the
    token is unknown (e.g. superseded by a newer login, since only one token
    is active per user), or when the token has expired.
    """
    header_ok = bool(authorization) and authorization.startswith("Bearer ")
    if not header_ok:
        raise errorRes(status_code=401, message="Missing or invalid token")

    bearer_token = authorization.split(" ")[1]
    lookup = await db.execute(select(User).where(User.active_token == bearer_token))
    user = lookup.scalar_one_or_none()

    # Unknown token: either never issued, or replaced by a newer login.
    if user is None:
        raise errorRes(status_code=401, message="Token invalid or used by another login")

    # Expired token (naive UTC comparison, matching how expiry is stored).
    expiry = user.token_expired_at
    if expiry and expiry < datetime.utcnow():
        raise errorRes(status_code=401, message="Token expired")

    return user

View File

@ -1,20 +0,0 @@
from fastapi import Depends, status
from api.deps.auth_dependency import get_current_user
from response import errorRes
def require_role(required_role: str):
    """Build a FastAPI dependency that only admits users with *required_role*.

    Example:
        @router.get("/admin", dependencies=[Depends(require_role("admin"))])
    """
    async def role_checker(user = Depends(get_current_user)):
        if user.role == required_role:
            return user
        raise errorRes(
            status_code=status.HTTP_403_FORBIDDEN,
            message="Access denied",
            detail=f"Access denied: requires role '{required_role}'",
        )
    return role_checker

View File

@ -1,14 +0,0 @@
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from services.auth.login import loginService, get_db
router = APIRouter()
class LoginRequest(BaseModel):
    """Request body for POST /login: plain username/password credentials."""
    username: str
    password: str
@router.post("/login")
async def login(request: LoginRequest, db: AsyncSession = Depends(get_db)):
    """Authenticate a user; delegates all credential logic to loginService."""
    username, password = request.username, request.password
    return await loginService(username, password, db)

View File

@ -1,6 +1,7 @@
import asyncio import asyncio
from uuid import uuid4 from uuid import uuid4
from fastapi import APIRouter, HTTPException from fastapi import APIRouter, HTTPException
import httpx
import requests import requests
from sqlalchemy import text from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.exc import SQLAlchemyError
@ -10,8 +11,7 @@ from response import successRes, errorRes
from services.datasets.publish_geonetwork import publish_metadata from services.datasets.publish_geonetwork import publish_metadata
from services.datasets.publish_geoserver import publish_layer_to_geoserver from services.datasets.publish_geoserver import publish_layer_to_geoserver
from services.datasets.metadata import update_job_status from services.datasets.metadata import update_job_status
from services.upload_file.upload_ws import report_progress from core.config import GEOSERVER_URL, GEOSERVER_USER, GEOSERVER_PASS, QGIS_URL, MAIN_API_URL, SERVICE_KEY
from core.config import GEOSERVER_URL, GEOSERVER_USER, GEOSERVER_PASS, QGIS_URL
router = APIRouter() router = APIRouter()
@ -109,17 +109,10 @@ def cleansing_data(table_name: str, job_id: str):
async def job_callback(payload: dict): async def job_callback(payload: dict):
table = payload["table"] table = payload["table"]
job_id = payload["job_id"] job_id = payload["job_id"]
# await asyncio.sleep(10)
await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
# await asyncio.sleep(5)
geos_link = publish_layer_to_geoserver(table, job_id) geos_link = publish_layer_to_geoserver(table, job_id)
await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
# await asyncio.sleep(3)
uuid = publish_metadata(table_name=table, geoserver_links=geos_link) uuid = publish_metadata(table_name=table, geoserver_links=geos_link)
await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
update_job_status(table, "FINISHED", job_id) update_job_status(table, "FINISHED", job_id)
return { return {
@ -214,13 +207,27 @@ def get_style(style_name: str, workspace: str = None):
# cleansing query # cleansing query
# ============================================================= # =============================================================
# async def query_cleansing_data_fn(
# table_name: str
# ):
# try:
# async with engine.begin() as conn:
# await conn.execute(
# text("SELECT public.fn_cleansing_satupeta_polygon(:table_name)"),
# {"table_name": table_name}
# )
# return "done"
# except SQLAlchemyError as e:
# raise RuntimeError(f"Fix geometry failed: {str(e)}")
async def query_cleansing_data( async def query_cleansing_data(
table_name: str table_name: str
): ):
try: try:
async with engine.begin() as conn: async with engine.begin() as conn:
await conn.execute( await conn.execute(
text("SELECT public.fn_cleansing_satupeta_polygon(:table_name)"), text("CALL pr_cleansing_satupeta_polygon(:table_name, NULL);"),
{"table_name": table_name} {"table_name": table_name}
) )
return "done" return "done"
@ -228,46 +235,70 @@ async def query_cleansing_data(
except SQLAlchemyError as e: except SQLAlchemyError as e:
raise RuntimeError(f"Fix geometry failed: {str(e)}") raise RuntimeError(f"Fix geometry failed: {str(e)}")
async def publish_layer(table_name: str, job_id: str):
    """Publish *table_name* to GeoServer then GeoNetwork, reporting progress
    over the job's WebSocket channel, and mark the job FINISHED.

    NOTE(review): no try/except here — a failure in any step leaves the job
    without a FAILED status update; confirm whether that is intended.
    """
    await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
    geos_link = publish_layer_to_geoserver(table_name, job_id)
    await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
    uuid = publish_metadata(table_name=table_name, geoserver_links=geos_link)
    await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
    update_job_status(table_name, "FINISHED", job_id)
    return {
        "ok": True,
        "uuid": uuid
    }
async def publish_layer(table_name: str, job_id: str): async def publish_layer(table_name: str, job_id: str):
# await asyncio.sleep(10)
try: try:
await report_progress(job_id, "cleansing", 50, "Cleansing data selesai")
# await asyncio.sleep(5)
geos_link = publish_layer_to_geoserver(table_name, job_id) geos_link = publish_layer_to_geoserver(table_name, job_id)
await report_progress(job_id, "publish_geoserver", 80, "Publish GeoServer selesai")
# await asyncio.sleep(3)
uuid = publish_metadata( uuid = publish_metadata(
table_name=table_name, table_name=table_name,
geoserver_links=geos_link geoserver_links=geos_link
) )
await report_progress(job_id, "done", 100, "Publish GeoNetwork selesai")
update_job_status(table_name, "FINISHED", job_id) update_job_status(table_name, "FINISHED", job_id)
return uuid # return uuid
return {
"geos_link": geos_link["layer_url"],
"uuid": uuid
}
except Exception as e: except Exception as e:
update_job_status(table_name, "FAILED", job_id) update_job_status(table_name, "FAILED", job_id)
raise RuntimeError(f"Publish layer gagal: {e}") from e raise RuntimeError(f"Publish layer gagal: {e}") from e
async def upload_to_main(payload: dict):
    """POST *payload* to the main API's internal mapsets endpoint.

    Authenticates with the X-SERVICE-KEY header and a 10-second timeout.

    Returns:
        {"status": "success", "data": <response JSON>} on a 2xx reply.

    Raises:
        HTTPException(500): connection-level failure (DNS, timeout, refused).
        HTTPException(<upstream status>): the main API replied with 4xx/5xx.
        HTTPException(500): any other unexpected error.
    """
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.post(
                MAIN_API_URL + "/api/internal/mapsets",
                json=payload,
                headers={
                    "X-SERVICE-KEY": SERVICE_KEY
                }
            )
            # Promote 4xx/5xx replies to HTTPStatusError, handled below.
            response.raise_for_status()
            # fix: removed leftover debug print("GOOOOO")
            return {
                "status": "success",
                "data": response.json()
            }
    except httpx.RequestError as e:
        # Connection error: DNS, timeout, connection refused, etc.
        raise HTTPException(
            status_code=500,
            detail=f"Gagal connect ke MAIN_API: {str(e)}"
        )
    except httpx.HTTPStatusError as e:
        # The upstream API answered, but with an error status (4xx / 5xx).
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"MAIN_API error: {e.response.text}"
        )
    except Exception as e:
        # Fallback for anything else unexpected.
        raise HTTPException(
            status_code=500,
            detail=f"Unexpected error: {str(e)}"
        )

View File

@ -1,23 +0,0 @@
from typing import Dict, List
from fastapi import WebSocket
class JobWSManager:
    """Tracks open WebSocket connections per job id and broadcasts updates."""

    def __init__(self):
        # job_id -> list of live sockets subscribed to that job
        self.connections: Dict[str, List[WebSocket]] = {}

    async def connect(self, job_id: str, ws: WebSocket):
        """Accept *ws* and register it under *job_id*."""
        await ws.accept()
        self.connections.setdefault(job_id, []).append(ws)

    def disconnect(self, job_id: str, ws: WebSocket):
        """Unregister *ws*; drop the job entry once its last socket is gone."""
        sockets = self.connections.get(job_id)
        if sockets is None:
            return
        # fix: guard the removal — a double disconnect (or a socket that was
        # never registered) previously raised ValueError from list.remove().
        if ws in sockets:
            sockets.remove(ws)
        if not sockets:
            del self.connections[job_id]

    async def send(self, job_id: str, data: dict):
        """Send *data* as JSON to every socket subscribed to *job_id*."""
        # fix: iterate over a snapshot — a failing send can trigger
        # disconnect(), which would mutate the list mid-iteration.
        for ws in list(self.connections.get(job_id, [])):
            await ws.send_json(data)


manager = JobWSManager()

View File

@ -1,25 +0,0 @@
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from services.upload_file.upload_ws import job_state
from api.routers.ws.manager import manager
router = APIRouter()
@router.websocket("/ws/test")
async def ws_test(ws: WebSocket):
    """Health-check socket: accept the connection and immediately reply."""
    await ws.accept()
    await ws.send_text("OK")
@router.websocket("/ws/job/{job_id}")
async def ws_job(job_id: str, ws: WebSocket):
    """Subscribe a client to progress updates for *job_id*.

    On (re)connect the last known job state is replayed so a reconnecting
    client catches up; afterwards the socket just reads keep-alive frames
    until the client disconnects.
    """
    await manager.connect(job_id, ws)

    # Replay the most recent progress snapshot, if any (reconnect case).
    if job_id in job_state:
        await ws.send_json(job_state[job_id])

    try:
        while True:
            # Keep-alive only; the payload itself is ignored.
            await ws.receive_text()
    except WebSocketDisconnect:
        manager.disconnect(job_id, ws)

View File

@ -6,6 +6,9 @@ load_dotenv()
API_VERSION = "2.1.3" API_VERSION = "2.1.3"
MAIN_API_URL = os.getenv("MAIN_API_URL")
SERVICE_KEY = os.getenv("SERVICE_KEY")
POSTGIS_URL = os.getenv("POSTGIS_URL") POSTGIS_URL = os.getenv("POSTGIS_URL")
POSTGIS_SYNC_URL = os.getenv("SYNC_URL") POSTGIS_SYNC_URL = os.getenv("SYNC_URL")

View File

@ -1,42 +0,0 @@
from sqlalchemy import Column, Integer, String, Text, ForeignKey, DateTime, TIMESTAMP
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import func
Base = declarative_base()
class UploadLog(Base):
    """One row per file-upload attempt, recording outcome and target table."""
    __tablename__ = "upload_logs"
    id = Column(Integer, primary_key=True, index=True)
    filename = Column(String, nullable=False)
    table_name = Column(String, nullable=False)  # table the upload was loaded into
    file_type = Column(String, nullable=False)
    rows_count = Column(Integer)
    uploaded_at = Column(TIMESTAMP, server_default=func.now())  # set by the DB
    status = Column(String)
    message = Column(Text)
class Organization(Base):
    """Organization a user belongs to; one-to-many with User."""
    __tablename__ = "organizations"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(100), unique=True, nullable=False)
    address = Column(String(200), nullable=True)
    users = relationship("User", back_populates="organization")
class User(Base):
    """Application user with a single-active-token authentication scheme."""
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    username = Column(String(50), unique=True, nullable=False)
    password_hash = Column(String(255), nullable=False)
    role = Column(String(50), nullable=False, default="user")
    organization_id = Column(Integer, ForeignKey("organizations.id"), nullable=True)
    # Only one token is valid at a time; a new login overwrites these fields.
    active_token = Column(String(255), nullable=True)
    token_expired_at = Column(DateTime, nullable=True)
    last_login = Column(DateTime, nullable=True)
    organization = relationship("Organization", back_populates="users")

View File

@ -1,13 +1,9 @@
from fastapi import FastAPI from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from core.config import API_VERSION, ALLOWED_ORIGINS from core.config import API_VERSION, ALLOWED_ORIGINS
from database.connection import engine
from database.models import Base
from api.routers.system_router import router as system_router from api.routers.system_router import router as system_router
from api.routers.upload_file_router import router as upload_router from api.routers.upload_file_router import router as upload_router
from api.routers.auth_router import router as auth_router
from api.routers.datasets_router import router as dataset_router from api.routers.datasets_router import router as dataset_router
from api.routers.ws.upload_progress_ws import router as ws_router
# from contextlib import asynccontextmanager # from contextlib import asynccontextmanager
# from utils.qgis_init import init_qgis # from utils.qgis_init import init_qgis
@ -57,8 +53,6 @@ app.add_middleware(
# } # }
# Register routers # Register routers
app.include_router(ws_router)
app.include_router(system_router, tags=["System"]) app.include_router(system_router, tags=["System"])
app.include_router(auth_router, prefix="/auth", tags=["Auth"])
app.include_router(upload_router, prefix="/upload", tags=["Upload"]) app.include_router(upload_router, prefix="/upload", tags=["Upload"])
app.include_router(dataset_router, prefix="/dataset", tags=["Upload"]) app.include_router(dataset_router, prefix="/dataset", tags=["Upload"])

View File

@ -1,35 +1,16 @@
fastapi fastapi==0.128.0
uvicorn[standard] fiona==1.10.1
pandas geopandas==1.1.2
numpy httpx==0.28.1
geopandas numpy==2.4.1
shapely pandas==3.0.0
fiona pdfplumber==0.11.9
pyproj py7zr==1.1.2
SQLAlchemy pydantic==2.12.5
sqlalchemy pyogrio==0.12.1
psycopg2-binary pyproj==3.7.2
rapidfuzz python-dotenv==1.2.1
pdfplumber rapidfuzz==3.14.3
zipfile36 Requests==2.32.5
python-dotenv Shapely==2.1.2
pydantic SQLAlchemy==2.0.46
python-multipart
aiofiles
starlette
openpyxl
requests
pathlib
pyarrow
geoalchemy2
asyncpg
py7zr
bcrypt==4.0.1
passlib == 1.7.4
urllib3<2
osgeo
websockets
# --- jika menggunakan ai ---
groq

View File

@ -1,49 +0,0 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from passlib.context import CryptContext
from uuid import uuid4
from datetime import datetime, timedelta
from database.connection import SessionLocal
from database.models import User
from response import successRes, errorRes
async def get_db():
    """FastAPI dependency: yield an AsyncSession scoped to a single request."""
    async with SessionLocal() as session:
        yield session
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
async def loginService(username: str, password: str, db: AsyncSession):
    """Validate credentials and issue a fresh session token.

    A successful login replaces any previously issued token (a single token
    is active per user) and stamps last_login. Returns a successRes payload
    carrying the token and its expiry.

    Raises errorRes 401 on bad credentials and 403 when a non-admin user has
    no organization.
    """
    lookup = await db.execute(select(User).where(User.username == username))
    user = lookup.scalar_one_or_none()
    if user is None:
        raise errorRes(status_code=401, message="Invalid username or password")

    # Same message as "unknown user" so usernames cannot be enumerated.
    if not pwd_context.verify(password, user.password_hash):
        raise errorRes(status_code=401, message="Invalid username or password")

    # Non-admin accounts must be tied to an organization.
    if user.role != "admin" and not user.organization_id:
        raise errorRes(status_code=403, message="User must belong to an organization")

    # Rotate the single active token and record the login time.
    token = str(uuid4())
    expiry = datetime.utcnow() + timedelta(hours=4)
    user.active_token = token
    user.token_expired_at = expiry
    user.last_login = datetime.utcnow()
    await db.commit()

    res = {
        "status": "success",
        "username": user.username,
        "role": user.role,
        "organization_id": user.organization_id,
        "token": token,
        "token_expired_at": expiry.isoformat()
    }
    return successRes(message="Success Login", data=res)

View File

@ -93,17 +93,17 @@ def get_author_metadata(table_name: str):
sql = """ sql = """
SELECT am.table_title, am.dataset_title, am.dataset_abstract, am.keywords, am.date_created, SELECT am.table_title, am.dataset_title, am.dataset_abstract, am.keywords, am.date_created,
am.organization_name, am.contact_person_name, am.created_at, am.organization_name, am.contact_person_name, am.created_at,
am.contact_email, am.contact_phone, am.geom_type, am.contact_email, am.contact_phone, am.geom_type,
u.organization_id, u.organization_id,
o.address AS organization_address, o.address AS organization_address,
o.email AS organization_email, o.email AS organization_email,
o.phone_number AS organization_phone o.phone_number AS organization_phone
FROM backend.author_metadata AS am FROM backend.author_metadata AS am
LEFT JOIN backend.users u ON am.user_id = u.id LEFT JOIN backend.users u ON am.user_id = u.id
LEFT JOIN backend.organizations o ON u.organization_id = o.id LEFT JOIN backend.organizations o ON u.organization_id = o.id
WHERE am.table_title = :table WHERE am.table_title = :table
LIMIT 1 LIMIT 1
""" """
conn = engine.connect() conn = engine.connect()
@ -157,413 +157,413 @@ def generate_metadata_xml(table_name, meta, extent, geoserver_links):
return f""" return f"""
<gmd:MD_Metadata <gmd:MD_Metadata
xmlns:gmd="http://www.isotc211.org/2005/gmd" xmlns:gmd="http://www.isotc211.org/2005/gmd"
xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:gco="http://www.isotc211.org/2005/gco"
xmlns:srv="http://www.isotc211.org/2005/srv" xmlns:srv="http://www.isotc211.org/2005/srv"
xmlns:gmx="http://www.isotc211.org/2005/gmx" xmlns:gmx="http://www.isotc211.org/2005/gmx"
xmlns:gts="http://www.isotc211.org/2005/gts" xmlns:gts="http://www.isotc211.org/2005/gts"
xmlns:gsr="http://www.isotc211.org/2005/gsr" xmlns:gsr="http://www.isotc211.org/2005/gsr"
xmlns:gmi="http://www.isotc211.org/2005/gmi" xmlns:gmi="http://www.isotc211.org/2005/gmi"
xmlns:gml="http://www.opengis.net/gml/3.2" xmlns:gml="http://www.opengis.net/gml/3.2"
xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.isotc211.org/2005/gmd http://schemas.opengis.net/csw/2.0.2/profiles/apiso/1.0.0/apiso.xsd"> xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.isotc211.org/2005/gmd http://schemas.opengis.net/csw/2.0.2/profiles/apiso/1.0.0/apiso.xsd">
<gmd:fileIdentifier> <gmd:fileIdentifier>
<gco:CharacterString>{uuid}</gco:CharacterString> <gco:CharacterString>{uuid}</gco:CharacterString>
</gmd:fileIdentifier> </gmd:fileIdentifier>
<gmd:language> <gmd:language>
<gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng"/> <gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng"/>
</gmd:language> </gmd:language>
<gmd:characterSet> <gmd:characterSet>
<gmd:MD_CharacterSetCode codeListValue="utf8" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_CharacterSetCode"/> <gmd:MD_CharacterSetCode codeListValue="utf8" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_CharacterSetCode"/>
</gmd:characterSet> </gmd:characterSet>
<gmd:hierarchyLevel> <gmd:hierarchyLevel>
<gmd:MD_ScopeCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ScopeCode" codeListValue="feature"/> <gmd:MD_ScopeCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ScopeCode" codeListValue="feature"/>
</gmd:hierarchyLevel> </gmd:hierarchyLevel>
<gmd:contact> <gmd:contact>
<gmd:CI_ResponsibleParty> <gmd:CI_ResponsibleParty>
<gmd:individualName> <gmd:individualName>
<gco:CharacterString>{meta['contact_person_name']}</gco:CharacterString> <gco:CharacterString>{meta['contact_person_name']}</gco:CharacterString>
</gmd:individualName> </gmd:individualName>
<gmd:organisationName> <gmd:organisationName>
<gco:CharacterString>{meta['organization_name']}</gco:CharacterString> <gco:CharacterString>{meta['organization_name']}</gco:CharacterString>
</gmd:organisationName> </gmd:organisationName>
<gmd:contactInfo> <gmd:contactInfo>
<gmd:CI_Contact> <gmd:CI_Contact>
<gmd:phone> <gmd:phone>
<gmd:CI_Telephone> <gmd:CI_Telephone>
<gmd:voice> <gmd:voice>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:voice> </gmd:voice>
<gmd:facsimile> <gmd:facsimile>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:facsimile> </gmd:facsimile>
</gmd:CI_Telephone> </gmd:CI_Telephone>
</gmd:phone> </gmd:phone>
<gmd:address> <gmd:address>
<gmd:CI_Address> <gmd:CI_Address>
<gmd:deliveryPoint> <gmd:deliveryPoint>
<gco:CharacterString>{meta['organization_address']}</gco:CharacterString> <gco:CharacterString>{meta['organization_address']}</gco:CharacterString>
</gmd:deliveryPoint> </gmd:deliveryPoint>
<gmd:city> <gmd:city>
<gco:CharacterString>Surabaya</gco:CharacterString> <gco:CharacterString>Surabaya</gco:CharacterString>
</gmd:city> </gmd:city>
<gmd:administrativeArea> <gmd:administrativeArea>
<gco:CharacterString>Jawa Timur</gco:CharacterString> <gco:CharacterString>Jawa Timur</gco:CharacterString>
</gmd:administrativeArea> </gmd:administrativeArea>
<gmd:country> <gmd:country>
<gco:CharacterString>Indonesia</gco:CharacterString> <gco:CharacterString>Indonesia</gco:CharacterString>
</gmd:country> </gmd:country>
<gmd:electronicMailAddress> <gmd:electronicMailAddress>
<gco:CharacterString>{meta['organization_email']}</gco:CharacterString> <gco:CharacterString>{meta['organization_email']}</gco:CharacterString>
</gmd:electronicMailAddress> </gmd:electronicMailAddress>
</gmd:CI_Address> </gmd:CI_Address>
</gmd:address> </gmd:address>
<gmd:hoursOfService> <gmd:hoursOfService>
<gco:CharacterString>08.00-16.00</gco:CharacterString> <gco:CharacterString>08.00-16.00</gco:CharacterString>
</gmd:hoursOfService> </gmd:hoursOfService>
</gmd:CI_Contact> </gmd:CI_Contact>
</gmd:contactInfo> </gmd:contactInfo>
<gmd:role> <gmd:role>
<gmd:CI_RoleCode codeListValue="pointOfContact" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode"/> <gmd:CI_RoleCode codeListValue="pointOfContact" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode"/>
</gmd:role> </gmd:role>
</gmd:CI_ResponsibleParty> </gmd:CI_ResponsibleParty>
</gmd:contact> </gmd:contact>
<gmd:dateStamp> <gmd:dateStamp>
<gco:DateTime>{datetime.utcnow().isoformat()}+07:00</gco:DateTime> <gco:DateTime>{datetime.utcnow().isoformat()}+07:00</gco:DateTime>
</gmd:dateStamp> </gmd:dateStamp>
<gmd:metadataStandardName> <gmd:metadataStandardName>
<gco:CharacterString>ISO 19115:2003/19139</gco:CharacterString> <gco:CharacterString>ISO 19115:2003/19139</gco:CharacterString>
</gmd:metadataStandardName> </gmd:metadataStandardName>
<gmd:metadataStandardVersion> <gmd:metadataStandardVersion>
<gco:CharacterString>1.0</gco:CharacterString> <gco:CharacterString>1.0</gco:CharacterString>
</gmd:metadataStandardVersion> </gmd:metadataStandardVersion>
<gmd:spatialRepresentationInfo> <gmd:spatialRepresentationInfo>
<gmd:MD_VectorSpatialRepresentation> <gmd:MD_VectorSpatialRepresentation>
<gmd:geometricObjects> <gmd:geometricObjects>
<gmd:MD_GeometricObjects> <gmd:MD_GeometricObjects>
<gmd:geometricObjectType> <gmd:geometricObjectType>
<gmd:MD_GeometricObjectTypeCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_GeometricObjectTypeCode" codeListValue="{geom_type_code}"/> <gmd:MD_GeometricObjectTypeCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_GeometricObjectTypeCode" codeListValue="{geom_type_code}"/>
</gmd:geometricObjectType> </gmd:geometricObjectType>
<gmd:geometricObjectCount> <gmd:geometricObjectCount>
<gco:Integer>38</gco:Integer> <gco:Integer>38</gco:Integer>
</gmd:geometricObjectCount> </gmd:geometricObjectCount>
</gmd:MD_GeometricObjects> </gmd:MD_GeometricObjects>
</gmd:geometricObjects> </gmd:geometricObjects>
</gmd:MD_VectorSpatialRepresentation> </gmd:MD_VectorSpatialRepresentation>
</gmd:spatialRepresentationInfo> </gmd:spatialRepresentationInfo>
<gmd:referenceSystemInfo> <gmd:referenceSystemInfo>
<gmd:MD_ReferenceSystem> <gmd:MD_ReferenceSystem>
<gmd:referenceSystemIdentifier> <gmd:referenceSystemIdentifier>
<gmd:RS_Identifier> <gmd:RS_Identifier>
<gmd:code> <gmd:code>
<gco:CharacterString>4326</gco:CharacterString> <gco:CharacterString>4326</gco:CharacterString>
</gmd:code> </gmd:code>
<gmd:codeSpace> <gmd:codeSpace>
<gco:CharacterString>EPSG</gco:CharacterString> <gco:CharacterString>EPSG</gco:CharacterString>
</gmd:codeSpace> </gmd:codeSpace>
</gmd:RS_Identifier> </gmd:RS_Identifier>
</gmd:referenceSystemIdentifier> </gmd:referenceSystemIdentifier>
</gmd:MD_ReferenceSystem> </gmd:MD_ReferenceSystem>
</gmd:referenceSystemInfo> </gmd:referenceSystemInfo>
<gmd:identificationInfo> <gmd:identificationInfo>
<gmd:MD_DataIdentification> <gmd:MD_DataIdentification>
<gmd:citation> <gmd:citation>
<gmd:CI_Citation> <gmd:CI_Citation>
<gmd:title> <gmd:title>
<gco:CharacterString>{meta['dataset_title']}</gco:CharacterString> <gco:CharacterString>{meta['dataset_title']}</gco:CharacterString>
</gmd:title> </gmd:title>
<gmd:date> <gmd:date>
<gmd:CI_Date> <gmd:CI_Date>
<gmd:date> <gmd:date>
<gco:DateTime>{meta['created_at'].isoformat()}+07:00</gco:DateTime> <gco:DateTime>{meta['created_at'].isoformat()}+07:00</gco:DateTime>
</gmd:date> </gmd:date>
<gmd:dateType> <gmd:dateType>
<gmd:CI_DateTypeCode codeListValue="publication" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_DateTypeCode"/> <gmd:CI_DateTypeCode codeListValue="publication" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_DateTypeCode"/>
</gmd:dateType> </gmd:dateType>
</gmd:CI_Date> </gmd:CI_Date>
</gmd:date> </gmd:date>
<gmd:edition> <gmd:edition>
<gco:CharacterString>{meta['date_created'].year}</gco:CharacterString> <gco:CharacterString>{meta['date_created'].year}</gco:CharacterString>
</gmd:edition> </gmd:edition>
<gmd:citedResponsibleParty> <gmd:citedResponsibleParty>
<gmd:CI_ResponsibleParty> <gmd:CI_ResponsibleParty>
<gmd:individualName> <gmd:individualName>
<gco:CharacterString>{meta['contact_person_name']}</gco:CharacterString> <gco:CharacterString>{meta['contact_person_name']}</gco:CharacterString>
</gmd:individualName> </gmd:individualName>
<gmd:organisationName> <gmd:organisationName>
<gco:CharacterString>{meta['organization_name']}</gco:CharacterString> <gco:CharacterString>{meta['organization_name']}</gco:CharacterString>
</gmd:organisationName> </gmd:organisationName>
<gmd:contactInfo> <gmd:contactInfo>
<gmd:CI_Contact> <gmd:CI_Contact>
<gmd:phone> <gmd:phone>
<gmd:CI_Telephone> <gmd:CI_Telephone>
<gmd:voice> <gmd:voice>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:voice> </gmd:voice>
<gmd:facsimile> <gmd:facsimile>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:facsimile> </gmd:facsimile>
</gmd:CI_Telephone> </gmd:CI_Telephone>
</gmd:phone> </gmd:phone>
<gmd:address> <gmd:address>
<gmd:CI_Address> <gmd:CI_Address>
<gmd:deliveryPoint> <gmd:deliveryPoint>
<gco:CharacterString>{meta['organization_address']}</gco:CharacterString> <gco:CharacterString>{meta['organization_address']}</gco:CharacterString>
</gmd:deliveryPoint> </gmd:deliveryPoint>
<gmd:city> <gmd:city>
<gco:CharacterString>Surabaya</gco:CharacterString> <gco:CharacterString>Surabaya</gco:CharacterString>
</gmd:city> </gmd:city>
<gmd:country> <gmd:country>
<gco:CharacterString>Indonesia</gco:CharacterString> <gco:CharacterString>Indonesia</gco:CharacterString>
</gmd:country> </gmd:country>
<gmd:electronicMailAddress> <gmd:electronicMailAddress>
<gco:CharacterString>{meta['organization_email']}</gco:CharacterString> <gco:CharacterString>{meta['organization_email']}</gco:CharacterString>
</gmd:electronicMailAddress> </gmd:electronicMailAddress>
</gmd:CI_Address> </gmd:CI_Address>
</gmd:address> </gmd:address>
<gmd:hoursOfService> <gmd:hoursOfService>
<gco:CharacterString>08.00-16.00</gco:CharacterString> <gco:CharacterString>08.00-16.00</gco:CharacterString>
</gmd:hoursOfService> </gmd:hoursOfService>
</gmd:CI_Contact> </gmd:CI_Contact>
</gmd:contactInfo> </gmd:contactInfo>
<gmd:role> <gmd:role>
<gmd:CI_RoleCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode" codeListValue="custodian"/> <gmd:CI_RoleCode codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode" codeListValue="custodian"/>
</gmd:role> </gmd:role>
</gmd:CI_ResponsibleParty> </gmd:CI_ResponsibleParty>
</gmd:citedResponsibleParty> </gmd:citedResponsibleParty>
<gmd:otherCitationDetails> <gmd:otherCitationDetails>
<gco:CharacterString>Timezone: UTC+7 (Asia/Jakarta)</gco:CharacterString> <gco:CharacterString>Timezone: UTC+7 (Asia/Jakarta)</gco:CharacterString>
</gmd:otherCitationDetails> </gmd:otherCitationDetails>
</gmd:CI_Citation> </gmd:CI_Citation>
</gmd:citation> </gmd:citation>
<gmd:abstract> <gmd:abstract>
<gco:CharacterString>{meta['dataset_abstract']}</gco:CharacterString> <gco:CharacterString>{meta['dataset_abstract']}</gco:CharacterString>
</gmd:abstract> </gmd:abstract>
<gmd:purpose> <gmd:purpose>
<gco:CharacterString>{meta['dataset_abstract']}</gco:CharacterString> <gco:CharacterString>{meta['dataset_abstract']}</gco:CharacterString>
</gmd:purpose> </gmd:purpose>
<gmd:status> <gmd:status>
<gmd:MD_ProgressCode codeListValue="completed" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ProgressCode"/> <gmd:MD_ProgressCode codeListValue="completed" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ProgressCode"/>
</gmd:status> </gmd:status>
<gmd:pointOfContact> <gmd:pointOfContact>
<gmd:CI_ResponsibleParty> <gmd:CI_ResponsibleParty>
<gmd:individualName> <gmd:individualName>
<gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString> <gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString>
</gmd:individualName> </gmd:individualName>
<gmd:organisationName> <gmd:organisationName>
<gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString> <gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString>
</gmd:organisationName> </gmd:organisationName>
<gmd:positionName gco:nilReason="missing"/> <gmd:positionName gco:nilReason="missing"/>
<gmd:contactInfo> <gmd:contactInfo>
<gmd:CI_Contact> <gmd:CI_Contact>
<gmd:phone> <gmd:phone>
<gmd:CI_Telephone> <gmd:CI_Telephone>
<gmd:voice> <gmd:voice>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:voice> </gmd:voice>
<gmd:facsimile> <gmd:facsimile>
<gco:CharacterString>{meta['organization_phone']}</gco:CharacterString> <gco:CharacterString>{meta['organization_phone']}</gco:CharacterString>
</gmd:facsimile> </gmd:facsimile>
</gmd:CI_Telephone> </gmd:CI_Telephone>
</gmd:phone> </gmd:phone>
<gmd:address> <gmd:address>
<gmd:CI_Address> <gmd:CI_Address>
<gmd:deliveryPoint> <gmd:deliveryPoint>
<gco:CharacterString>{meta['organization_address']}</gco:CharacterString> <gco:CharacterString>{meta['organization_address']}</gco:CharacterString>
</gmd:deliveryPoint> </gmd:deliveryPoint>
<gmd:city> <gmd:city>
<gco:CharacterString>Surabaya</gco:CharacterString> <gco:CharacterString>Surabaya</gco:CharacterString>
</gmd:city> </gmd:city>
<gmd:administrativeArea> <gmd:administrativeArea>
<gco:CharacterString>Jawa Timur</gco:CharacterString> <gco:CharacterString>Jawa Timur</gco:CharacterString>
</gmd:administrativeArea> </gmd:administrativeArea>
<gmd:country> <gmd:country>
<gco:CharacterString>Indonesia</gco:CharacterString> <gco:CharacterString>Indonesia</gco:CharacterString>
</gmd:country> </gmd:country>
<gmd:electronicMailAddress> <gmd:electronicMailAddress>
<gco:CharacterString>{meta['organization_email']}</gco:CharacterString> <gco:CharacterString>{meta['organization_email']}</gco:CharacterString>
</gmd:electronicMailAddress> </gmd:electronicMailAddress>
</gmd:CI_Address> </gmd:CI_Address>
</gmd:address> </gmd:address>
</gmd:CI_Contact> </gmd:CI_Contact>
</gmd:contactInfo> </gmd:contactInfo>
<gmd:role> <gmd:role>
<gmd:CI_RoleCode codeListValue="owner" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode"/> <gmd:CI_RoleCode codeListValue="owner" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_RoleCode"/>
</gmd:role> </gmd:role>
</gmd:CI_ResponsibleParty> </gmd:CI_ResponsibleParty>
</gmd:pointOfContact> </gmd:pointOfContact>
<gmd:resourceMaintenance> <gmd:resourceMaintenance>
<gmd:MD_MaintenanceInformation> <gmd:MD_MaintenanceInformation>
<gmd:maintenanceAndUpdateFrequency> <gmd:maintenanceAndUpdateFrequency>
<gmd:MD_MaintenanceFrequencyCode codeListValue="annually" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_MaintenanceFrequencyCode"/> <gmd:MD_MaintenanceFrequencyCode codeListValue="annually" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_MaintenanceFrequencyCode"/>
</gmd:maintenanceAndUpdateFrequency> </gmd:maintenanceAndUpdateFrequency>
</gmd:MD_MaintenanceInformation> </gmd:MD_MaintenanceInformation>
</gmd:resourceMaintenance> </gmd:resourceMaintenance>
<gmd:descriptiveKeywords> <gmd:descriptiveKeywords>
<gmd:MD_Keywords> <gmd:MD_Keywords>
{keywords_xml} {keywords_xml}
</gmd:MD_Keywords> </gmd:MD_Keywords>
</gmd:descriptiveKeywords> </gmd:descriptiveKeywords>
<gmd:resourceConstraints> <gmd:resourceConstraints>
<gmd:MD_LegalConstraints> <gmd:MD_LegalConstraints>
<gmd:accessConstraints> <gmd:accessConstraints>
<gmd:MD_RestrictionCode codeListValue="copyright" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_RestrictionCode"/> <gmd:MD_RestrictionCode codeListValue="copyright" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_RestrictionCode"/>
</gmd:accessConstraints> </gmd:accessConstraints>
<gmd:useConstraints> <gmd:useConstraints>
<gmd:MD_RestrictionCode codeListValue="otherRestrictions" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_RestrictionCode"/> <gmd:MD_RestrictionCode codeListValue="otherRestrictions" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_RestrictionCode"/>
</gmd:useConstraints> </gmd:useConstraints>
<gmd:otherConstraints> <gmd:otherConstraints>
<gco:CharacterString>Penggunaan data harus mencantumkan sumber: {meta['organization_name']}.</gco:CharacterString> <gco:CharacterString>Penggunaan data harus mencantumkan sumber: {meta['organization_name']}.</gco:CharacterString>
</gmd:otherConstraints> </gmd:otherConstraints>
</gmd:MD_LegalConstraints> </gmd:MD_LegalConstraints>
</gmd:resourceConstraints> </gmd:resourceConstraints>
<gmd:spatialRepresentationType> <gmd:spatialRepresentationType>
<gmd:MD_SpatialRepresentationTypeCode codeListValue="vector" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_SpatialRepresentationTypeCode"/> <gmd:MD_SpatialRepresentationTypeCode codeListValue="vector" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_SpatialRepresentationTypeCode"/>
</gmd:spatialRepresentationType> </gmd:spatialRepresentationType>
<gmd:spatialResolution> <gmd:spatialResolution>
<gmd:MD_Resolution> <gmd:MD_Resolution>
<gmd:equivalentScale> <gmd:equivalentScale>
<gmd:MD_RepresentativeFraction> <gmd:MD_RepresentativeFraction>
<gmd:denominator> <gmd:denominator>
<gco:Integer>25000</gco:Integer> <gco:Integer>25000</gco:Integer>
</gmd:denominator> </gmd:denominator>
</gmd:MD_RepresentativeFraction> </gmd:MD_RepresentativeFraction>
</gmd:equivalentScale> </gmd:equivalentScale>
</gmd:MD_Resolution> </gmd:MD_Resolution>
</gmd:spatialResolution> </gmd:spatialResolution>
<gmd:language> <gmd:language>
<gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng"/> <gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng"/>
</gmd:language> </gmd:language>
<gmd:characterSet> <gmd:characterSet>
<gmd:MD_CharacterSetCode codeListValue="utf8" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_CharacterSetCode"/> <gmd:MD_CharacterSetCode codeListValue="utf8" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_CharacterSetCode"/>
</gmd:characterSet> </gmd:characterSet>
<gmd:extent> <gmd:extent>
<gmd:EX_Extent> <gmd:EX_Extent>
<gmd:geographicElement> <gmd:geographicElement>
<gmd:EX_GeographicBoundingBox> <gmd:EX_GeographicBoundingBox>
<gmd:westBoundLongitude><gco:Decimal>{extent['xmin']}</gco:Decimal></gmd:westBoundLongitude> <gmd:westBoundLongitude><gco:Decimal>{extent['xmin']}</gco:Decimal></gmd:westBoundLongitude>
<gmd:eastBoundLongitude><gco:Decimal>{extent['xmax']}</gco:Decimal></gmd:eastBoundLongitude> <gmd:eastBoundLongitude><gco:Decimal>{extent['xmax']}</gco:Decimal></gmd:eastBoundLongitude>
<gmd:southBoundLatitude><gco:Decimal>{extent['ymin']}</gco:Decimal></gmd:southBoundLatitude> <gmd:southBoundLatitude><gco:Decimal>{extent['ymin']}</gco:Decimal></gmd:southBoundLatitude>
<gmd:northBoundLatitude><gco:Decimal>{extent['ymax']}</gco:Decimal></gmd:northBoundLatitude> <gmd:northBoundLatitude><gco:Decimal>{extent['ymax']}</gco:Decimal></gmd:northBoundLatitude>
</gmd:EX_GeographicBoundingBox> </gmd:EX_GeographicBoundingBox>
</gmd:geographicElement> </gmd:geographicElement>
</gmd:EX_Extent> </gmd:EX_Extent>
</gmd:extent> </gmd:extent>
</gmd:MD_DataIdentification> </gmd:MD_DataIdentification>
</gmd:identificationInfo> </gmd:identificationInfo>
<gmd:contentInfo> <gmd:contentInfo>
<gmd:MD_FeatureCatalogueDescription> <gmd:MD_FeatureCatalogueDescription>
<gmd:complianceCode> <gmd:complianceCode>
<gco:Boolean>true</gco:Boolean> <gco:Boolean>true</gco:Boolean>
</gmd:complianceCode> </gmd:complianceCode>
<gmd:includedWithDataset gco:nilReason="unknown"/> <gmd:includedWithDataset gco:nilReason="unknown"/>
<gmd:featureCatalogueCitation> <gmd:featureCatalogueCitation>
<gmd:CI_Citation> <gmd:CI_Citation>
<gmd:title> <gmd:title>
<gco:CharacterString>{meta['dataset_title']}</gco:CharacterString> <gco:CharacterString>{meta['dataset_title']}</gco:CharacterString>
</gmd:title> </gmd:title>
<gmd:date> <gmd:date>
<gmd:CI_Date> <gmd:CI_Date>
<gmd:date> <gmd:date>
<gco:DateTime>{meta['created_at'].isoformat()}+07:00</gco:DateTime> <gco:DateTime>{meta['created_at'].isoformat()}+07:00</gco:DateTime>
</gmd:date> </gmd:date>
<gmd:dateType> <gmd:dateType>
<gmd:CI_DateTypeCode codeListValue="publication" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_DateTypeCode"/> <gmd:CI_DateTypeCode codeListValue="publication" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#CI_DateTypeCode"/>
</gmd:dateType> </gmd:dateType>
</gmd:CI_Date> </gmd:CI_Date>
</gmd:date> </gmd:date>
<gmd:edition> <gmd:edition>
<gco:CharacterString>{meta['date_created'].year}</gco:CharacterString> <gco:CharacterString>{meta['date_created'].year}</gco:CharacterString>
</gmd:edition> </gmd:edition>
</gmd:CI_Citation> </gmd:CI_Citation>
</gmd:featureCatalogueCitation> </gmd:featureCatalogueCitation>
</gmd:MD_FeatureCatalogueDescription> </gmd:MD_FeatureCatalogueDescription>
</gmd:contentInfo> </gmd:contentInfo>
<gmd:distributionInfo> <gmd:distributionInfo>
<gmd:MD_Distribution> <gmd:MD_Distribution>
<gmd:transferOptions> <gmd:transferOptions>
<gmd:MD_DigitalTransferOptions> <gmd:MD_DigitalTransferOptions>
<gmd:onLine> <gmd:onLine>
<gmd:CI_OnlineResource> <gmd:CI_OnlineResource>
<gmd:linkage> <gmd:linkage>
<gmd:URL>{geoserver_links["wms_url"]}</gmd:URL> <gmd:URL>{geoserver_links["wms_url"]}</gmd:URL>
</gmd:linkage> </gmd:linkage>
<gmd:protocol> <gmd:protocol>
<gco:CharacterString>DB:POSTGIS</gco:CharacterString> <gco:CharacterString>DB:POSTGIS</gco:CharacterString>
</gmd:protocol> </gmd:protocol>
<gmd:name> <gmd:name>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:name> </gmd:name>
<gmd:description> <gmd:description>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:description> </gmd:description>
</gmd:CI_OnlineResource> </gmd:CI_OnlineResource>
</gmd:onLine> </gmd:onLine>
<gmd:onLine> <gmd:onLine>
<gmd:CI_OnlineResource> <gmd:CI_OnlineResource>
<gmd:linkage> <gmd:linkage>
<gmd:URL>{geoserver_links["wms_url"]}</gmd:URL> <gmd:URL>{geoserver_links["wms_url"]}</gmd:URL>
</gmd:linkage> </gmd:linkage>
<gmd:protocol> <gmd:protocol>
<gco:CharacterString>WWW:LINK-1.0-http--link</gco:CharacterString> <gco:CharacterString>WWW:LINK-1.0-http--link</gco:CharacterString>
</gmd:protocol> </gmd:protocol>
<gmd:name> <gmd:name>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:name> </gmd:name>
<gmd:description> <gmd:description>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:description> </gmd:description>
</gmd:CI_OnlineResource> </gmd:CI_OnlineResource>
</gmd:onLine> </gmd:onLine>
<gmd:onLine> <gmd:onLine>
<gmd:CI_OnlineResource> <gmd:CI_OnlineResource>
<gmd:linkage> <gmd:linkage>
<gmd:URL>{geoserver_links["wms_url"]}</gmd:URL> <gmd:URL>{geoserver_links["wms_url"]}</gmd:URL>
</gmd:linkage> </gmd:linkage>
<gmd:protocol> <gmd:protocol>
<gco:CharacterString>OGC:WMS</gco:CharacterString> <gco:CharacterString>OGC:WMS</gco:CharacterString>
</gmd:protocol> </gmd:protocol>
<gmd:name> <gmd:name>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:name> </gmd:name>
</gmd:CI_OnlineResource> </gmd:CI_OnlineResource>
</gmd:onLine> </gmd:onLine>
<gmd:onLine> <gmd:onLine>
<gmd:CI_OnlineResource> <gmd:CI_OnlineResource>
<gmd:linkage> <gmd:linkage>
<gmd:URL>{geoserver_links["wfs_url"]}</gmd:URL> <gmd:URL>{geoserver_links["wfs_url"]}</gmd:URL>
</gmd:linkage> </gmd:linkage>
<gmd:protocol> <gmd:protocol>
<gco:CharacterString>OGC:WFS</gco:CharacterString> <gco:CharacterString>OGC:WFS</gco:CharacterString>
</gmd:protocol> </gmd:protocol>
<gmd:name> <gmd:name>
<gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString> <gco:CharacterString>{meta["dataset_title"]}</gco:CharacterString>
</gmd:name> </gmd:name>
</gmd:CI_OnlineResource> </gmd:CI_OnlineResource>
</gmd:onLine> </gmd:onLine>
</gmd:MD_DigitalTransferOptions> </gmd:MD_DigitalTransferOptions>
</gmd:transferOptions> </gmd:transferOptions>
</gmd:MD_Distribution> </gmd:MD_Distribution>
</gmd:distributionInfo> </gmd:distributionInfo>
<gmd:dataQualityInfo> <gmd:dataQualityInfo>
<gmd:DQ_DataQuality> <gmd:DQ_DataQuality>
<gmd:scope> <gmd:scope>
<gmd:DQ_Scope> <gmd:DQ_Scope>
<gmd:level> <gmd:level>
<gmd:MD_ScopeCode codeListValue="dataset" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ScopeCode"/> <gmd:MD_ScopeCode codeListValue="dataset" codeList="http://standards.iso.org/iso/19139/resources/gmxCodelists.xml#MD_ScopeCode"/>
</gmd:level> </gmd:level>
</gmd:DQ_Scope> </gmd:DQ_Scope>
</gmd:scope> </gmd:scope>
<gmd:lineage> <gmd:lineage>
<gmd:LI_Lineage> <gmd:LI_Lineage>
<gmd:statement> <gmd:statement>
<gco:CharacterString>Data dihasilkan dari digitasi peta dasar skala 1:25000 menggunakan QGIS.</gco:CharacterString> <gco:CharacterString>Data dihasilkan dari digitasi peta dasar skala 1:25000 menggunakan QGIS.</gco:CharacterString>
</gmd:statement> </gmd:statement>
</gmd:LI_Lineage> </gmd:LI_Lineage>
</gmd:lineage> </gmd:lineage>
</gmd:DQ_DataQuality> </gmd:DQ_DataQuality>
</gmd:dataQualityInfo> </gmd:dataQualityInfo>
</gmd:MD_Metadata> </gmd:MD_Metadata>
""" """
@ -584,7 +584,7 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
# if not xsrf_token: # if not xsrf_token:
# raise HTTPException(status_code=500, detail="Could not retrieve XSRF-TOKEN from GeoNetwork.") # raise HTTPException(status_code=500, detail="Could not retrieve XSRF-TOKEN from GeoNetwork.")
session, xsrf_token = create_gn_session() session, xsrf_token = create_gn_session()
headers = { headers = {
'X-XSRF-TOKEN': xsrf_token, 'X-XSRF-TOKEN': xsrf_token,
@ -599,9 +599,9 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
} }
params = { params = {
"ownerGroup": 1, # all "ownerGroup": 1, # all
"ownerUser": 1 # admin "ownerUser": 1 # admin
} }
response = session.post( response = session.post(
GN_API_RECORDS_URL, GN_API_RECORDS_URL,
@ -623,7 +623,7 @@ def upload_metadata_to_geonetwork(xml_metadata: str):
record = publish_record(session, uuid) record = publish_record(session, uuid)
print('[record]', record) print('[record]', record)
# print("response", response.json()) # print("response", response.json())
return response.json() return response.json()

View File

@ -348,10 +348,10 @@ def generate_metadata_xml(table_name, meta, extent, geoserver_links):
<gmd:pointOfContact> <gmd:pointOfContact>
<gmd:CI_ResponsibleParty> <gmd:CI_ResponsibleParty>
<gmd:individualName> <gmd:individualName>
<gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString> <gco:CharacterString>Lab AI Polinema</gco:CharacterString>
</gmd:individualName> </gmd:individualName>
<gmd:organisationName> <gmd:organisationName>
<gco:CharacterString>Dinas Tenaga Kerja dan Transmigrasi Provinsi Jawa Timur</gco:CharacterString> <gco:CharacterString>Lab AI Polinema</gco:CharacterString>
</gmd:organisationName> </gmd:organisationName>
<gmd:positionName gco:nilReason="missing"/> <gmd:positionName gco:nilReason="missing"/>
<gmd:contactInfo> <gmd:contactInfo>

View File

@ -150,14 +150,29 @@ def publish_layer_to_geoserver(table: str, job_id: str):
f"{GEOSERVER_URL}/{GEOSERVER_WORKSPACE}/wfs?" f"{GEOSERVER_URL}/{GEOSERVER_WORKSPACE}/wfs?"
f"service=WFS&request=GetFeature&typeName={GEOSERVER_WORKSPACE}:{table}" f"service=WFS&request=GetFeature&typeName={GEOSERVER_WORKSPACE}:{table}"
) )
print(f"[GeoServer] WMS URL: {wms_link}") # print(f"[GeoServer] WMS URL: {wms_link}")
print(f"[GeoServer] WFS URL: {wfs_link}") # print(f"[GeoServer] WFS URL: {wfs_link}")
print(f"[GeoServer] Reload completed. Layer {table} ready.") # print(f"[GeoServer] Reload completed. Layer {table} ready.")
openlayer_url = (
f"{GEOSERVER_URL}/{GEOSERVER_WORKSPACE}/wms?"
f"service=WMS"
f"&version=1.1.0"
f"&request=GetMap"
f"&layers={GEOSERVER_WORKSPACE}:{table}"
f"&styles="
f"&bbox=110.89528623700005%2C-8.780412043999945%2C116.26994997700001%2C-5.042971664999925"
f"&width=768"
f"&height=384"
f"&srs=EPSG:4326"
f"&format=application/openlayers"
)
return { return {
"table": table, "table": table,
"style": style_name, "style": style_name,
"wms_url": wms_link, "wms_url": wms_link,
"wfs_url": wfs_link "wfs_url": wfs_link,
"layer_url": openlayer_url
} }

View File

@ -11,8 +11,8 @@ from pyproj import CRS
from shapely.geometry.base import BaseGeometry from shapely.geometry.base import BaseGeometry
from shapely.geometry import base as shapely_base from shapely.geometry import base as shapely_base
from fastapi import Depends, File, Form, UploadFile, HTTPException from fastapi import Depends, File, Form, UploadFile, HTTPException
from api.routers.datasets_router import cleansing_data, publish_layer, query_cleansing_data from api.routers.datasets_router import cleansing_data, publish_layer, query_cleansing_data, upload_to_main
from core.config import UPLOAD_FOLDER, MAX_FILE_MB, VALID_WKT_PREFIXES from core.config import UPLOAD_FOLDER, MAX_FILE_MB, VALID_WKT_PREFIXES, GEONETWORK_URL
from services.upload_file.ai_generate import send_metadata from services.upload_file.ai_generate import send_metadata
from services.upload_file.readers.reader_csv import read_csv from services.upload_file.readers.reader_csv import read_csv
from services.upload_file.readers.reader_shp import read_shp from services.upload_file.readers.reader_shp import read_shp
@ -20,9 +20,7 @@ from services.upload_file.readers.reader_gdb import read_gdb
from services.upload_file.readers.reader_mpk import read_mpk from services.upload_file.readers.reader_mpk import read_mpk
from services.upload_file.readers.reader_pdf import convert_df, read_pdf from services.upload_file.readers.reader_pdf import convert_df, read_pdf
from services.upload_file.utils.geometry_detector import detect_and_build_geometry, attach_polygon_geometry_auto from services.upload_file.utils.geometry_detector import detect_and_build_geometry, attach_polygon_geometry_auto
from services.upload_file.upload_ws import report_progress
from database.connection import engine, sync_engine from database.connection import engine, sync_engine
from database.models import Base
from pydantic import BaseModel from pydantic import BaseModel
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from shapely import MultiLineString, MultiPolygon, wkt from shapely import MultiLineString, MultiPolygon, wkt
@ -306,8 +304,8 @@ def process_data(df: pd.DataFrame, ext: str, filename: str, fileDesc: str):
# "kategori_mapset": "" # "kategori_mapset": ""
# } # }
} }
# ai_suggest = send_metadata(ai_context) ai_suggest = send_metadata(ai_context)
ai_suggest = {'judul': 'Peta Risiko Letusan Gunung Arjuna di Provinsi Jawa Timur', 'abstrak': 'Peta ini menggambarkan wilayah berisiko letusan Gunung Arjuna yang berada di Provinsi Jawa Timur. Data disajikan dalam bentuk poligon yang menunjukkan zona risiko berdasarkan analisis potensi aktivitas vulkanik.', 'tujuan': 'Data dapat digunakan untuk perencanaan mitigasi bencana dan pengambilan keputusan di wilayah Jawa Timur.', 'keyword': ['Risiko letusan', 'Gunung Arjuna', 'Bencana alam', 'Provinsi Jawa Timur', 'Geologi'], 'kategori': ['Geoscientific information', 'Environment'], 'kategori_mapset': 'Lingkungan Hidup'} # ai_suggest = {'judul': 'Peta Risiko Letusan Gunung Arjuna di Provinsi Jawa Timur', 'abstrak': 'Peta ini menggambarkan wilayah berisiko letusan Gunung Arjuna yang berada di Provinsi Jawa Timur. Data disajikan dalam bentuk poligon yang menunjukkan zona risiko berdasarkan analisis potensi aktivitas vulkanik.', 'tujuan': 'Data dapat digunakan untuk perencanaan mitigasi bencana dan pengambilan keputusan di wilayah Jawa Timur.', 'keyword': ['Risiko letusan', 'Gunung Arjuna', 'Bencana alam', 'Provinsi Jawa Timur', 'Geologi'], 'kategori': ['Geoscientific information', 'Environment'], 'kategori_mapset': 'Lingkungan Hidup'}
# print(ai_suggest) # print(ai_suggest)
response = { response = {
@ -509,223 +507,279 @@ def save_xml_to_sld(xml_string, filename):
# async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
# try:
# table_name = await generate_unique_table_name(payload.title)
# # DataFrame
# df = pd.DataFrame(payload.rows)
# df.columns = [col.upper() for col in df.columns]
# if "GEOMETRY" not in df.columns:
# raise HTTPException(400, "Kolom GEOMETRY tidak ditemukan")
# # =====================================================================
# # 1. LOAD WKT → SHAPELY
# # =====================================================================
# def safe_load_wkt(g):
# if not isinstance(g, str):
# return None
# try:
# geom = wkt.loads(g)
# return geom
# except:
# return None
# df["GEOMETRY"] = df["GEOMETRY"].apply(safe_load_wkt)
# df = df.rename(columns={"GEOMETRY": "geom"})
# # =====================================================================
# # 2. DROP ROW geometry NULL
# # =====================================================================
# df = df[df["geom"].notnull()]
# if df.empty:
# raise HTTPException(400, "Semua geometry invalid atau NULL")
# # =====================================================================
# # 3. VALIDATE geometry (very important)
# # =====================================================================
# df["geom"] = df["geom"].apply(lambda g: g if g.is_valid else g.buffer(0))
# # =====================================================================
# # 4. SERAGAMKAN TIPE GEOMETRY (Polygon→MultiPolygon, Line→MultiLine)
# # =====================================================================
# def unify_geometry_type(g):
# gtype = g.geom_type.upper()
# if gtype == "POLYGON":
# return MultiPolygon([g])
# if gtype == "LINESTRING":
# return MultiLineString([g])
# return g # sudah MULTI atau POINT
# df["geom"] = df["geom"].apply(unify_geometry_type)
# # =====================================================================
# # 5. DETEKSI CRS DARI METADATA / INPUT / DEFAULT
# # =====================================================================
# detected_crs = payload.author.get("crs")
# detected = payload.author.get("crs")
# print('crs', detected)
# if not detected_crs:
# detected_crs = "EPSG:4326"
# detected_crs = 'EPSG:4326'
# # Buat GeoDataFrame
# gdf = gpd.GeoDataFrame(df, geometry="geom", crs=detected_crs)
# row_count = len(gdf)
# # =====================================================================
# # 6. VERIFY CRS (SRID) VALID di PROJ / PostGIS
# # =====================================================================
# try:
# _ = gdf.to_crs(gdf.crs) # test CRS valid
# except:
# raise HTTPException(400, f"CRS {detected_crs} tidak valid")
# # =====================================================================
# # 7. SIMPAN KE POSTGIS (synchronous)
# # =====================================================================
# loop = asyncio.get_running_loop()
# await loop.run_in_executor(
# None,
# lambda: gdf.to_postgis(
# table_name,
# sync_engine,
# if_exists="replace",
# index=False
# )
# )
# # =====================================================================
# # 8. ADD PRIMARY KEY (wajib untuk QGIS API)
# # =====================================================================
# async with engine.begin() as conn:
# await conn.execute(text(
# f'ALTER TABLE "{table_name}" ADD COLUMN _ID SERIAL PRIMARY KEY;'
# ))
# # =====================================================================
# # 9. SIMPAN METADATA (geom_type, author metadata)
# # =====================================================================
# unified_geom_type = list(gdf.geom_type.unique())
# author = payload.author
# async with engine.begin() as conn:
# await conn.execute(text("""
# INSERT INTO backend.author_metadata (
# table_title,
# dataset_title,
# dataset_abstract,
# keywords,
# topic_category,
# date_created,
# dataset_status,
# organization_name,
# contact_person_name,
# contact_email,
# contact_phone,
# geom_type,
# user_id,
# process,
# geometry_count
# ) VALUES (
# :table_title,
# :dataset_title,
# :dataset_abstract,
# :keywords,
# :topic_category,
# :date_created,
# :dataset_status,
# :organization_name,
# :contact_person_name,
# :contact_email,
# :contact_phone,
# :geom_type,
# :user_id,
# :process,
# :geometry_count
# )
# """), {
# "table_title": table_name,
# "dataset_title": payload.title,
# "dataset_abstract": author.get("abstract"),
# "keywords": author.get("keywords"),
# # "topic_category": author.get("topicCategory"),
# "topic_category": ", ".join(author.get("topicCategory")),
# "date_created": str_to_date(author.get("dateCreated")),
# "dataset_status": author.get("status"),
# "organization_name": author.get("organization"),
# "contact_person_name": author.get("contactName"),
# "contact_email": author.get("contactEmail"),
# "contact_phone": author.get("contactPhone"),
# "geom_type": json.dumps(unified_geom_type),
# "user_id": user_id,
# "process": 'CLEANSING',
# "geometry_count": row_count
# })
# # =====================================================================
# # 10. LOGGING
# # =====================================================================
# await log_activity(
# user_id=user_id,
# action_type="UPLOAD",
# action_title=f"Upload dataset {table_name}",
# details={"table_name": table_name, "rows": len(gdf)}
# )
# job_id = generate_job_id(str(user_id))
# result = {
# "job_id": job_id,
# "job_status": "wait",
# "table_name": table_name,
# "status": "success",
# "message": f"Tabel '{table_name}' berhasil dibuat.",
# "total_rows": len(gdf),
# "geometry_type": unified_geom_type,
# "crs": detected_crs,
# "metadata_uuid": ""
# }
# save_xml_to_sld(payload.style, job_id)
# await report_progress(job_id, "upload", 20, "Upload selesai")
# # cleansing_data(table_name, job_id)
# cleansing = await query_cleansing_data(table_name)
# result['job_status'] = cleansing
# uuid = await publish_layer(table_name, job_id)
# result['metadata_uuid'] = uuid
# return successRes(data=result)
# except Exception as e:
# await log_activity(
# user_id=user_id,
# action_type="ERROR",
# action_title="Upload gagal",
# details={"error": str(e)}
# )
# raise HTTPException(status_code=500, detail=str(e))
async def handle_to_postgis(payload: UploadRequest, user_id: int = 2): async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
try: try:
table_name = await generate_unique_table_name(payload.title)
# DataFrame
df = pd.DataFrame(payload.rows)
df.columns = [col.upper() for col in df.columns]
if "GEOMETRY" not in df.columns:
raise HTTPException(400, "Kolom GEOMETRY tidak ditemukan")
# =====================================================================
# 1. LOAD WKT → SHAPELY
# =====================================================================
def safe_load_wkt(g):
if not isinstance(g, str):
return None
try:
geom = wkt.loads(g)
return geom
except:
return None
df["GEOMETRY"] = df["GEOMETRY"].apply(safe_load_wkt)
df = df.rename(columns={"GEOMETRY": "geom"})
# =====================================================================
# 2. DROP ROW geometry NULL
# =====================================================================
df = df[df["geom"].notnull()]
if df.empty:
raise HTTPException(400, "Semua geometry invalid atau NULL")
# =====================================================================
# 3. VALIDATE geometry (very important)
# =====================================================================
df["geom"] = df["geom"].apply(lambda g: g if g.is_valid else g.buffer(0))
# =====================================================================
# 4. SERAGAMKAN TIPE GEOMETRY (Polygon→MultiPolygon, Line→MultiLine)
# =====================================================================
def unify_geometry_type(g):
gtype = g.geom_type.upper()
if gtype == "POLYGON":
return MultiPolygon([g])
if gtype == "LINESTRING":
return MultiLineString([g])
return g # sudah MULTI atau POINT
df["geom"] = df["geom"].apply(unify_geometry_type)
# =====================================================================
# 5. DETEKSI CRS DARI METADATA / INPUT / DEFAULT
# =====================================================================
detected_crs = payload.author.get("crs")
detected = payload.author.get("crs")
print('crs', detected)
if not detected_crs:
detected_crs = "EPSG:4326"
detected_crs = 'EPSG:4326'
# Buat GeoDataFrame
gdf = gpd.GeoDataFrame(df, geometry="geom", crs=detected_crs)
row_count = len(gdf)
# =====================================================================
# 6. VERIFY CRS (SRID) VALID di PROJ / PostGIS
# =====================================================================
try:
_ = gdf.to_crs(gdf.crs) # test CRS valid
except:
raise HTTPException(400, f"CRS {detected_crs} tidak valid")
# =====================================================================
# 7. SIMPAN KE POSTGIS (synchronous)
# =====================================================================
loop = asyncio.get_running_loop()
await loop.run_in_executor(
None,
lambda: gdf.to_postgis(
table_name,
sync_engine,
if_exists="replace",
index=False
)
)
# =====================================================================
# 8. ADD PRIMARY KEY (wajib untuk QGIS API)
# =====================================================================
async with engine.begin() as conn:
await conn.execute(text(
f'ALTER TABLE "{table_name}" ADD COLUMN _ID SERIAL PRIMARY KEY;'
))
# =====================================================================
# 9. SIMPAN METADATA (geom_type, author metadata)
# =====================================================================
unified_geom_type = list(gdf.geom_type.unique())
author = payload.author
async with engine.begin() as conn:
await conn.execute(text("""
INSERT INTO backend.author_metadata (
table_title,
dataset_title,
dataset_abstract,
keywords,
topic_category,
date_created,
dataset_status,
organization_name,
contact_person_name,
contact_email,
contact_phone,
geom_type,
user_id,
process,
geometry_count
) VALUES (
:table_title,
:dataset_title,
:dataset_abstract,
:keywords,
:topic_category,
:date_created,
:dataset_status,
:organization_name,
:contact_person_name,
:contact_email,
:contact_phone,
:geom_type,
:user_id,
:process,
:geometry_count
)
"""), {
"table_title": table_name,
"dataset_title": payload.title,
"dataset_abstract": author.get("abstract"),
"keywords": author.get("keywords"),
# "topic_category": author.get("topicCategory"),
"topic_category": ", ".join(author.get("topicCategory")),
"date_created": str_to_date(author.get("dateCreated")),
"dataset_status": author.get("status"),
"organization_name": author.get("organization"),
"contact_person_name": author.get("contactName"),
"contact_email": author.get("contactEmail"),
"contact_phone": author.get("contactPhone"),
"geom_type": json.dumps(unified_geom_type),
"user_id": user_id,
"process": 'CLEANSING',
"geometry_count": row_count
})
# =====================================================================
# 10. LOGGING
# =====================================================================
await log_activity(
user_id=user_id,
action_type="UPLOAD",
action_title=f"Upload dataset {table_name}",
details={"table_name": table_name, "rows": len(gdf)}
)
job_id = generate_job_id(str(user_id)) job_id = generate_job_id(str(user_id))
result = { result = {
"job_id": job_id, "job_id": job_id,
"job_status": "done", "job_status": "wait",
"table_name": "just for test", "table_name": table_name,
"status": "success", "status": "success",
"message": f"Tabel test berhasil dibuat.", "message": f"Tabel '{table_name}' berhasil dibuat.",
"total_rows": 10, "total_rows": len(gdf),
"geometry_type": "Polygon", "geometry_type": unified_geom_type,
"crs": "EPSG 4326", "crs": detected_crs,
"metadata_uuid": "-" "metadata_uuid": ""
} }
save_xml_to_sld(payload.style, job_id)
# await report_progress(job_id, "upload", 20, "Upload selesai")
# cleansing_data(table_name, job_id)
cleansing = await query_cleansing_data(table_name)
result['job_status'] = cleansing
publish = await publish_layer(table_name, job_id)
result['metadata_uuid'] = publish['uuid']
mapset = {
"name": payload.title,
"description": author.get("abstract"),
"scale": "1:25000",
"projection_system_id": "0196c746-d1ba-7f1c-9706-5df738679cc7",
"category_id": author.get("mapsetCategory"),
"data_status": "sementara",
"classification_id": "01968b4b-d3f9-76c9-888c-ee887ac31ce4",
"producer_id": "019bd4ea-eb33-704e-83c3-8253d457b187",
"layer_type": unified_geom_type[0],
"source_id": ["019bd4e7-3df8-75c8-9b89-3f310967649c"],
"layer_url": publish['geos_link'],
"metadata_url": f"{GEONETWORK_URL}/srv/eng/catalog.search#/metadata/{publish['uuid']}",
"coverage_level": "provinsi",
"coverage_area": "kabupaten",
"data_update_period": "Tahunan",
"data_version": "2026",
"is_popular": False,
"is_active": True,
"regional_id": "01968b53-a910-7a67-bd10-975b8923b92e",
"notes": "Mapset baru dibuat",
"status_validation": "on_verification",
}
print("mapset data",mapset)
await upload_to_main(mapset)
return successRes(data=result) return successRes(data=result)
except Exception as e: except Exception as e:
print("gtw") await log_activity(
user_id=user_id,
action_type="ERROR",
action_title="Upload gagal",
details={"error": str(e)}
)
raise HTTPException(status_code=500, detail=str(e))
# async def handle_to_postgis(payload: UploadRequest, user_id: int = 2):
# try:
# job_id = generate_job_id(str(user_id))
# result = {
# "job_id": job_id,
# "job_status": "done",
# "table_name": "just for test",
# "status": "success",
# "message": f"Tabel test berhasil dibuat.",
# "total_rows": 10,
# "geometry_type": "Polygon",
# "crs": "EPSG 4326",
# "metadata_uuid": "-"
# }
# mapset = {
# "name": "Resiko Letusan Gunung Arjuno",
# "description": "Testing Automation Upload",
# "scale": "1:25000",
# "projection_system_id": "0196c746-d1ba-7f1c-9706-5df738679cc7",
# "category_id": "0196c80c-855f-77f9-abd0-0c8a30b8c2f5",
# "data_status": "sementara",
# "classification_id": "01968b4b-d3f9-76c9-888c-ee887ac31ce4",
# "producer_id": "019bd4ea-eb33-704e-83c3-8253d457b187",
# "layer_type": "polygon",
# "source_id": ["019bd4e7-3df8-75c8-9b89-3f310967649c"],
# "layer_url": "http://192.168.60.24:8888/geoserver/wms?service=WMS&version=1.1.0&request=GetMap&layers=labai:risiko_letusan_gunung_arjuno_bromo&bbox=110.89528623700005,-8.780412043999945,116.26994997700001,-5.042971664999925&width=768&height=534&srs=EPSG:4326&styles=&format=application/openlayers",
# "metadata_url": "http://192.168.60.24:7777/geonetwork/srv/eng/catalog.search#/metadata/9e5e2f09-13ef-49b5-bb49-1cb12136f63b",
# "coverage_level": "provinsi",
# "coverage_area": "kabupaten",
# "data_update_period": "Tahunan",
# "data_version": "2026",
# "is_popular": False,
# "is_active": True,
# "regional_id": "01968b53-a910-7a67-bd10-975b8923b92e",
# "notes": "Mapset baru dibuat",
# "status_validation": "on_verification",
# }
# await upload_to_main(mapset)
# return successRes(data=result)
# except Exception as e:
# print("errot", e)

View File

@ -1,27 +0,0 @@
# app/jobs/progress.py
from typing import Dict
from api.routers.ws.manager import manager
# state job (in-memory dulu)
job_state: Dict[str, dict] = {}
async def report_progress(
job_id: str,
step: str,
progress: int,
message: str
):
"""
Fungsi tunggal untuk update & broadcast progress job
"""
job_state[job_id] = {
"job_id": job_id,
"step": step,
"progress": progress,
"message": message,
}
# push ke websocket
await manager.send(job_id, job_state[job_id])

View File

@ -1,30 +0,0 @@
# utils/qgis_init.py
import os
import sys
# Lokasi instalasi QGIS di Linux (Ubuntu / Debian)
QGIS_PREFIX = "/usr"
# Path modul Python QGIS
sys.path.append("/usr/share/qgis/python")
# Environment variable agar QGIS dapat berjalan headless (tanpa GUI)
os.environ["QGIS_PREFIX_PATH"] = QGIS_PREFIX
os.environ["QT_QPA_PLATFORM"] = "offscreen"
from qgis.core import QgsApplication
from qgis.analysis import QgsNativeAlgorithms
import processing
from processing.core.Processing import Processing
def init_qgis():
qgs = QgsApplication([], False)
qgs.initQgis()
# Register QGIS processing provider
Processing.initialize()
QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
print("QGIS initialized successfully")
return qgs