From d29f72a40bd559c762aacbb3be32dc6c0278d7e1 Mon Sep 17 00:00:00 2001 From: Meninjar Mulyono Date: Wed, 24 Sep 2025 19:13:54 +0700 Subject: [PATCH] first commit --- .air.toml | 46 + .gitignore | 34 + .goreleaser.yml | 42 + Dockerfile | 19 + Makefile | 49 + README.md | 569 ++++++ cmd/api/main.go | 86 + cmd/logging/main.go | 109 ++ diagnostic/main.go | 130 ++ docker-compose.yml | 194 ++ docs/docs.go | 1169 +++++++++++ docs/swagger.json | 1150 +++++++++++ docs/swagger.yaml | 771 ++++++++ example.env | 92 + go.mod | 90 + go.sum | 361 ++++ internal/config/config.go | 739 +++++++ internal/database/database.go | 699 +++++++ internal/handlers/auth/auth.go | 132 ++ internal/handlers/auth/token.go | 95 + internal/handlers/healthcheck/healthcheck.go | 24 + internal/handlers/retribusi/retribusi.go | 1401 +++++++++++++ internal/middleware/auth_middleware.go | 59 + internal/middleware/error_handler.go | 54 + internal/middleware/jwt_middleware.go | 77 + internal/middleware/keycloak_middleware.go | 254 +++ internal/models/auth/auth.go | 31 + internal/models/models.go | 221 +++ internal/models/retribusi/retribusi.go | 228 +++ internal/models/validation.go | 106 + internal/routes/v1/routes.go | 151 ++ internal/server/server.go | 53 + internal/services/auth/auth.go | 169 ++ internal/utils/filters/dynamic_filter.go | 593 ++++++ internal/utils/filters/query_parser.go | 241 +++ .../utils/validation/duplicate_validator.go | 141 ++ pkg/logger/README.md | 356 ++++ pkg/logger/config.go | 137 ++ pkg/logger/context.go | 142 ++ pkg/logger/logger.go | 616 ++++++ pkg/logger/middleware.go | 191 ++ pkg/utils/etag.go | 54 + pkg/utils/utils | 0 pkg/validator/validator | 1 + scripts/scripts | 0 tools/general/generate-handler.go | 1740 +++++++++++++++++ 46 files changed, 13616 insertions(+) create mode 100644 .air.toml create mode 100644 .gitignore create mode 100644 .goreleaser.yml create mode 100644 Dockerfile create mode 100644 Makefile create mode 100644 README.md create mode 100644 
cmd/api/main.go create mode 100644 cmd/logging/main.go create mode 100644 diagnostic/main.go create mode 100644 docker-compose.yml create mode 100644 docs/docs.go create mode 100644 docs/swagger.json create mode 100644 docs/swagger.yaml create mode 100644 example.env create mode 100644 go.mod create mode 100644 go.sum create mode 100644 internal/config/config.go create mode 100644 internal/database/database.go create mode 100644 internal/handlers/auth/auth.go create mode 100644 internal/handlers/auth/token.go create mode 100644 internal/handlers/healthcheck/healthcheck.go create mode 100644 internal/handlers/retribusi/retribusi.go create mode 100644 internal/middleware/auth_middleware.go create mode 100644 internal/middleware/error_handler.go create mode 100644 internal/middleware/jwt_middleware.go create mode 100644 internal/middleware/keycloak_middleware.go create mode 100644 internal/models/auth/auth.go create mode 100644 internal/models/models.go create mode 100644 internal/models/retribusi/retribusi.go create mode 100644 internal/models/validation.go create mode 100644 internal/routes/v1/routes.go create mode 100644 internal/server/server.go create mode 100644 internal/services/auth/auth.go create mode 100644 internal/utils/filters/dynamic_filter.go create mode 100644 internal/utils/filters/query_parser.go create mode 100644 internal/utils/validation/duplicate_validator.go create mode 100644 pkg/logger/README.md create mode 100644 pkg/logger/config.go create mode 100644 pkg/logger/context.go create mode 100644 pkg/logger/logger.go create mode 100644 pkg/logger/middleware.go create mode 100644 pkg/utils/etag.go create mode 100644 pkg/utils/utils create mode 100644 pkg/validator/validator create mode 100644 scripts/scripts create mode 100644 tools/general/generate-handler.go diff --git a/.air.toml b/.air.toml new file mode 100644 index 0000000..f127ea0 --- /dev/null +++ b/.air.toml @@ -0,0 +1,46 @@ +root = "." 
+testdata_dir = "testdata" +tmp_dir = "tmp" + +[build] + args_bin = [] + bin = ".\\main.exe" + cmd = "make build" + delay = 1000 + exclude_dir = ["assets", "tmp", "vendor", "testdata", "node_modules"] + exclude_file = [] + exclude_regex = ["_test.go"] + exclude_unchanged = false + follow_symlink = false + full_bin = "" + include_dir = [] + include_ext = ["go", "tpl", "tmpl", "html"] + include_file = [] + kill_delay = "0s" + log = "build-errors.log" + poll = false + poll_interval = 0 + post_cmd = [] + pre_cmd = [] + rerun = false + rerun_delay = 500 + send_interrupt = false + stop_on_error = false + +[color] + app = "" + build = "yellow" + main = "magenta" + runner = "green" + watcher = "cyan" + +[log] + main_only = false + time = false + +[misc] + clean_on_exit = false + +[screen] + clear_on_rebuild = false + keep_scroll = true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5cae497 --- /dev/null +++ b/.gitignore @@ -0,0 +1,34 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with "go test -c" +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +tmp/ + +# IDE specific files +.vscode +.idea + +# .env file +.env + +# Project build +main +*templ.go + +# OS X generated file +.DS_Store + diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..e001bf5 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,42 @@ +version: 2 +before: + hooks: + - go mod tidy + +env: + - PACKAGE_PATH=github.com///cmd + +builds: +- binary: "{{ .ProjectName }}" + main: ./cmd/api + goos: + - darwin + - linux + - windows + goarch: + - amd64 + - arm64 + env: + - CGO_ENABLED=0 + ldflags: + - -s -w -X {{.Env.PACKAGE_PATH}}={{.Version}} +release: + prerelease: auto + +universal_binaries: +- replace: true + +archives: + - name_template: > + {{- .ProjectName 
}}_{{- .Version }}_{{- title .Os }}_{{- if eq .Arch "amd64" }}x86_64{{- else if eq .Arch "386" }}i386{{- else }}{{ .Arch }}{{ end }}{{- if .Arm }}v{{ .Arm }}{{ end -}} + format_overrides: + - goos: windows + format: zip + builds_info: + group: root + owner: root + files: + - README.md + +checksum: + name_template: 'checksums.txt' diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..4b4c9ce --- /dev/null +++ b/Dockerfile @@ -0,0 +1,19 @@ +FROM golang:1.24.4-alpine AS build + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +RUN go build -o main cmd/api/main.go + +FROM alpine:3.20.1 AS prod +WORKDIR /app +COPY --from=build /app/main /app/main +COPY --from=build /app/.env /app/.env +EXPOSE 8080 +CMD ["./main"] + + diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0c2392a --- /dev/null +++ b/Makefile @@ -0,0 +1,49 @@ +# Simple Makefile for a Go project + +# Build the application +all: build test + +build: + @echo "Building..." + + + @go build -o main.exe cmd/api/main.go + +# Run the application +run: + @go run cmd/api/main.go +# Create DB container +docker-run: + @docker compose up --build + +# Shutdown DB container +docker-down: + @docker compose down + +# Test the application +test: + @echo "Testing..." + @go test ./... -v +# Integrations Tests for the application +itest: + @echo "Running integration tests..." + @go test ./internal/database -v + +# Clean the binary +clean: + @echo "Cleaning..." 
+	@rm -f main + +# Live Reload +watch: +	@powershell -ExecutionPolicy Bypass -Command "if (Get-Command air -ErrorAction SilentlyContinue) { \ +			air; \ +			Write-Output 'Watching...'; \ +		} else { \ +			Write-Output 'Installing air...'; \ +			go install github.com/air-verse/air@latest; \ +			air; \ +			Write-Output 'Watching...'; \ +		}" + +.PHONY: all build run test clean watch docker-run docker-down itest diff --git a/README.md b/README.md new file mode 100644 index 0000000..5d67144 --- /dev/null +++ b/README.md @@ -0,0 +1,569 @@ + +# ๐Ÿš€ API Service - CRUD Management System + +> **Sistem manajemen retribusi modern dengan arsitektur bersih untuk pengelolaan data retribusi pemerintah** + +## ๐Ÿ“‘ Daftar Isi + +- [โœจ Fitur Utama](#-fitur-utama) +- [๐Ÿ—๏ธ Arsitektur](#%EF%B8%8F-arsitektur) +- [โšก Quick Start](#-quick-start) +- [๐Ÿ” Autentikasi](#-autentikasi) +- [๐Ÿ“Š API Endpoints](#-api-endpoints) +- [๐Ÿ› ๏ธ Development](#%EF%B8%8F-development) +- [๐Ÿš€ Deployment](#-deployment) +- [๐Ÿ“š Dokumentasi](#-dokumentasi) + +*** + +## โœจ Fitur Utama + +### Core Features + +- **๐Ÿ”’ JWT Authentication** - Sistem autentikasi dengan Keycloak integration +- **๐Ÿ“‹ Retribusi Management** - CRUD lengkap untuk data retribusi +- **๐Ÿ” Dynamic Filtering** - Filter dan pencarian data retribusi secara dinamis +- **๐Ÿ“Š Advanced Search** - Pencarian dengan multiple fields dan operators +- **๐Ÿฅ BPJS Integration** - Integrasi dengan layanan kesehatan BPJS +- **๐Ÿฉบ SATUSEHAT Integration** - Integrasi dengan platform kesehatan SATUSEHAT +- **๐Ÿ“– API Documentation** - Swagger/OpenAPI yang interaktif + +### Developer Experience + +- **๐Ÿ”ฅ Hot Reload** - Development dengan auto-restart +- **๐Ÿณ Docker Ready** - Deployment yang mudah +- **โšก Code Generator** - Tools untuk generate handler dan model +- **๐Ÿงช Testing Suite** - Unit dan integration tests +- **๐Ÿ“Š Health Monitoring** - Monitoring kesehatan aplikasi +- **๐Ÿ—„๏ธ Multi Database** - Support PostgreSQL, MySQL, dan MongoDB + 
+*** + +## ๐Ÿ—๏ธ Arsitektur + +### Clean Architecture Layers + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Presentation Layer โ”‚ โ† handlers/, routes/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Application Layer โ”‚ โ† middleware/, services/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Domain Layer โ”‚ โ† models/, validation/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Infrastructure Layer โ”‚ โ† database/, external APIs +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +### Struktur Project + +``` +api-service/ +โ”œโ”€โ”€ ๐Ÿ“ cmd/ +โ”‚ โ”œโ”€โ”€ api/main.go # ๐Ÿšช Entry point aplikasi +โ”‚ โ””โ”€โ”€ logging/main.go # ๐Ÿ” Logging service +โ”œโ”€โ”€ ๐Ÿ“ internal/ # ๐Ÿ  Core business logic +โ”‚ โ”œโ”€โ”€ config/ # โš™๏ธ Configuration management +โ”‚ โ”œโ”€โ”€ database/ # ๐Ÿ—„๏ธ Database connections +โ”‚ โ”œโ”€โ”€ handlers/ # ๐ŸŽฎ HTTP controllers +โ”‚ โ”‚ โ”œโ”€โ”€ auth/ # ๐Ÿ” Authentication handlers +โ”‚ โ”‚ โ”œโ”€โ”€ healthcheck/ # ๐Ÿ’š Health check handlers +โ”‚ โ”‚ โ””โ”€โ”€ retribusi/ # ๐Ÿ“‹ Retribusi handlers +โ”‚ โ”œโ”€โ”€ middleware/ # ๐Ÿ›ก๏ธ Auth & validation middleware +โ”‚ โ”œโ”€โ”€ models/ # ๐Ÿ“Š Data structures +โ”‚ โ”‚ โ”œโ”€โ”€ auth/ # ๐Ÿ‘ค Auth models +โ”‚ โ”‚ โ””โ”€โ”€ retribusi/ # ๐Ÿ“‹ Retribusi models +โ”‚ โ”œโ”€โ”€ routes/ # ๐Ÿ›ฃ๏ธ API routing +โ”‚ โ”œโ”€โ”€ services/ # ๐Ÿ’ผ Business logic services +โ”‚ โ”‚ โ””โ”€โ”€ auth/ # ๐Ÿ” Auth services +โ”‚ โ”œโ”€โ”€ utils/ # ๐Ÿ› ๏ธ Utility functions +โ”‚ โ”‚ โ”œโ”€โ”€ filters/ # ๐Ÿ” Dynamic filtering +โ”‚ โ”‚ โ””โ”€โ”€ validation/ # โœ… Data validation +โ”‚ โ””โ”€โ”€ server/ # ๐ŸŒ HTTP server 
setup +โ”œโ”€โ”€ ๐Ÿ“ docs/ # ๐Ÿ“š Documentation +โ”œโ”€โ”€ ๐Ÿ“ examples/ # ๐Ÿ’ก Example files +โ”œโ”€โ”€ ๐Ÿ“ scripts/ # ๐Ÿ“œ Automation scripts +โ””โ”€โ”€ ๐Ÿ“ tools/ # ๐Ÿ”ง Development tools +``` + +*** + +## โšก Quick Start + +### 1๏ธโƒฃ Setup Environment (2 menit) + +```bash +# Clone repository +git clone +cd api-service + +# Setup environment +cp example.env .env +``` + +### 2๏ธโƒฃ Pilih Method Setup + +**๐Ÿณ Docker (Recommended)** + +```bash +make docker-run +``` + +**๐Ÿ”ง Manual Setup** + +```bash +# Install dependencies +go mod download + +# Start server +go run cmd/api/main.go +``` + +### Update Swagger Documentation + +```bash +swag init -g cmd/api/main.go -o docs/ +``` + +### 3๏ธโƒฃ Verify Installation + +| Service | URL | Status | +| :-- | :-- | :-- | +| **API** | http://localhost:8080/api/v1 | โœ… | +| **Swagger** | http://localhost:8080/swagger/index.html | ๐Ÿ“– | +| **Health Check** | http://localhost:8080/api/sistem/health | ๐Ÿ’š | + +*** + +## ๐Ÿ” Autentikasi + +### Login & Mendapatkan Token + +```bash +curl -X POST http://localhost:8080/api/v1/auth/login \ + -H "Content-Type: application/json" \ + -d '{ + "username": "admin", + "password": "password" + }' +``` + +**Response:** + +```json +{ + "access_token": "eyJhbGciOiJIUzI1NiIs...", + "expires_in": 3600, + "user": { + "id": "123", + "username": "admin", + "role": "admin" + } +} +``` + +### Menggunakan Token + +```bash +curl -X GET http://localhost:8080/api/v1/retribusi \ + -H "Authorization: Bearer " +``` + +### Demo Accounts + +| Username | Password | Role | Akses | +| :-- | :-- | :-- | +| `admin` | `password` | Admin | Semua endpoint | +| `user` | `password` | User | Read-only | + +*** + +## ๐Ÿ“Š API Endpoints + +### ๐ŸŒ Public Endpoints + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `POST` | `/api/v1/auth/login` | Login pengguna | +| `POST` | `/api/v1/auth/register` | Registrasi pengguna baru | +| `GET` | `/api/sistem/health` | Status kesehatan API | +| `GET` | 
`/api/sistem/info` | Informasi sistem | + +### ๐Ÿ”’ Protected Endpoints + +#### System Information + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/sistem/databases` | Informasi database connections | + +#### Retribusi Management + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/v1/retribusi` | List semua retribusi dengan pagination | +| `GET` | `/api/v1/retribusi/dynamic` | Query retribusi dengan filter dinamis | +| `GET` | `/api/v1/retribusi/search` | Pencarian retribusi advanced | +| `GET` | `/api/v1/retribusi/id/:id` | Detail retribusi by ID | +| `POST` | `/api/v1/retribusi` | Buat retribusi baru | +| `PUT` | `/api/v1/retribusi/id/:id` | Update retribusi | +| `DELETE` | `/api/v1/retribusi/id/:id` | Hapus retribusi (soft delete) | + +#### Dynamic Query Examples + +**Filter berdasarkan jenis:** + +```bash +GET /api/v1/retribusi/dynamic?filter[Jenis][_eq]=RETRIBUSI PELAYANAN KESEHATAN +``` + +**Kombinasi filter:** + +```bash +GET /api/v1/retribusi/dynamic?filter[status][_eq]=active&filter[Tarif][_gt]=100000 +``` + +**Pagination dan sorting:** + +```bash +GET /api/v1/retribusi/dynamic?sort=-date_created&limit=10&offset=20 +``` + +**Advanced search:** + +```bash +GET /api/v1/retribusi/search?q=rumah%20sakit&limit=20&offset=0 +``` + +### ๐Ÿฅ External Integrations + +#### BPJS Integration + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/v1/bpjs/peserta/:no` | Data peserta BPJS | +| `GET` | `/api/v1/bpjs/rujukan/:no` | Data rujukan | + +#### SATUSEHAT Integration + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/v1/satusehat/patient/:id` | Data pasien | +| `POST` | `/api/v1/satusehat/encounter` | Buat encounter baru | + +*** + +## ๐Ÿ› ๏ธ Development + +### Development Commands + +```bash +# ๐Ÿ”ฅ Development dengan hot reload +make watch + +# ๐Ÿƒ Run server +make run + +# ๐Ÿงช Testing +make test # Unit tests +make itest # Integration tests + +# ๐Ÿณ Docker operations 
+make docker-run # Start all services +make docker-down # Stop all services + +# ๐Ÿ” Code quality +make build # Build application +make clean # Clean build artifacts +``` + +### Environment Configuration + +**๐Ÿ“ .env File:** + +```bash +# Server Configuration +PORT=8080 +GIN_MODE=debug + +# Database Configuration +DB_CONNECTION=postgres +DB_HOST=localhost +DB_PORT=5432 +DB_USERNAME=postgres +DB_PASSWORD=postgres +DB_DATABASE=api_service + +# PostgreSQL Satudata Database +POSTGRES_SATUDATA_CONNECTION=postgres +POSTGRES_SATUDATA_HOST=localhost +POSTGRES_SATUDATA_PORT=5432 +POSTGRES_SATUDATA_USERNAME=postgres +POSTGRES_SATUDATA_PASSWORD=postgres +POSTGRES_SATUDATA_DATABASE=satu_db + +# MongoDB Configuration +MONGODB_MONGOHL7_CONNECTION=mongodb +MONGODB_MONGOHL7_HOST=localhost +MONGODB_MONGOHL7_PORT=27017 +MONGODB_MONGOHL7_USER=admin +MONGODB_MONGOHL7_PASS=password + +# MySQL Medical Database +MYSQL_MEDICAL_CONNECTION=mysql +MYSQL_MEDICAL_HOST=localhost +MYSQL_MEDICAL_PORT=3306 +MYSQL_MEDICAL_USERNAME=user +MYSQL_MEDICAL_PASSWORD=password +MYSQL_MEDICAL_DATABASE=healtcare_database + +# JWT Configuration +JWT_SECRET=your-super-secret-key-change-in-production + +# Keycloak Configuration +KEYCLOAK_ISSUER=https://auth.rssa.top/realms/sandbox +KEYCLOAK_AUDIENCE=nuxtsim-pendaftaran +KEYCLOAK_JWKS_URL=https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs +KEYCLOAK_ENABLED=true + +# BPJS Configuration +BPJS_BASEURL=https://apijkn.bpjs-kesehatan.go.id/vclaim-dev +BPJS_CONSID=52667757 +BPJS_USERKEY=4cf1cbef811314fvdgrc008440bbe9ef9ba789e482 +BPJS_SECRETKEY=1bV36ASDQQ3512D + + +``` + +### Code Generation + +**Generate Handler untuk Retribusi:** + +```bash +# Generate handler dasar +go run tools/general/generate-handler.go retribusi get post put delete + +# Generate dengan fitur advanced +go run tools/general/generate-handler.go retribusi get post put delete dynamic search stats +``` + +*** + +## ๐Ÿš€ Deployment + +### ๐Ÿณ Docker Deployment + +**Development:** + 
+```bash +# Start semua services +make docker-run + +# Stop services +make docker-down +``` + +**Production:** + +```bash +# Build production image +docker build -t api-service:prod . + +# Run production container +docker run -d \ + --name api-service \ + -p 8080:8080 \ + --env-file .env.prod \ + api-service:prod +``` + +### ๐Ÿ”ง Manual Deployment + +```bash +# Build aplikasi +make build + +# Start server +./main +``` + +### ๐Ÿ“‹ Environment Variables untuk Production + +```bash +# Server Configuration +APP_ENV=production +PORT=8080 +GIN_MODE=release + +# Database Configuration +DB_CONNECTION=postgres +DB_HOST=10.10.123.165 +DB_PORT=5432 +DB_USERNAME=stim +DB_PASSWORD=stim*RS54 +DB_DATABASE=satu_db + +# Security +JWT_SECRET=your-production-secret-key +KEYCLOAK_ENABLED=true + +# External Services +BPJS_BASEURL=https://apijkn.bpjs-kesehatan.go.id/vclaim-rest +BRIDGING_SATUSEHAT_BASE_URL=https://api-satusehat.kemkes.go.id/fhir-r4/v1 +``` + +*** + +## ๐Ÿ“š Dokumentasi + +### ๐Ÿ“– Interactive API Documentation + +Kunjungi **Swagger UI** di: http://localhost:8080/swagger/index.html + +**Cara menggunakan:** + +1. ๐Ÿ”‘ Login melalui `/auth/login` endpoint +2. ๐Ÿ“‹ Copy token dari response +3. ๐Ÿ”“ Klik tombol "Authorize" di Swagger +4. ๐Ÿ“ Masukkan: `Bearer ` +5. 
โœ… Test semua endpoint yang tersedia + +### ๐Ÿงช Testing Examples + +**JavaScript/Axios:** + +```javascript +// Login dan set token +const auth = await axios.post('/api/v1/auth/login', { + username: 'admin', + password: 'password' +}); + +axios.defaults.headers.common['Authorization'] = + `Bearer ${auth.data.access_token}`; + +// Fetch retribusi data +const retribusi = await axios.get('/api/v1/retribusi'); +console.log(retribusi.data); +``` + +**cURL Examples:** + +```bash +# Login +TOKEN=$(curl -s -X POST http://localhost:8080/api/v1/auth/login \ + -H "Content-Type: application/json" \ + -d '{"username":"admin","password":"password"}' | jq -r '.access_token') + +# Get retribusi data +curl -H "Authorization: Bearer $TOKEN" \ + http://localhost:8080/api/v1/retribusi + +# Dynamic filtering +curl -H "Authorization: Bearer $TOKEN" \ + "http://localhost:8080/api/v1/retribusi/dynamic?filter[Jenis][_eq]=RETRIBUSI%20PELAYANAN%20KESEHATAN" +``` + +### ๐Ÿ” Health Monitoring + +```bash +# Basic health check +curl http://localhost:8080/api/sistem/health + +# Database status +curl http://localhost:8080/api/sistem/databases + +# System info +curl http://localhost:8080/api/sistem/info +``` + +**Response:** + +```json +{ + "status": "healthy", + "timestamp": "2025-01-10T05:39:00Z", + "services": { + "database": "connected", + "keycloak": "accessible", + "bpjs_api": "accessible", + "satusehat_api": "accessible" + }, + "version": "1.0.0" +} +``` + +*** + +## ๐Ÿšจ Troubleshooting + +### Masalah Umum + +**โŒ Database Connection Error** + +```bash +# Cek status database +make docker-run + +# Check logs +docker logs api-service + +# Verify environment variables +cat .env | grep DB_ +``` + +**โŒ Authentication Error** + +- ๐Ÿ”„ Pastikan Keycloak service berjalan +- โœ… Cek KEYCLOAK_ISSUER URL +- ๐Ÿ“ Format token harus: `Bearer ` + +**โŒ Dynamic Filter Error** + +- โœ… Pastikan field name sesuai dengan database +- ๐Ÿ” Cek syntax filter: `filter[field][operator]=value` +- ๐Ÿ“ 
Operator yang didukung: `_eq`, `_neq`, `_gt`, `_lt`, `_contains` + +### Debug Mode + +```bash +# Enable debug logging +export GIN_MODE=debug + +# Run dengan verbose output +make run + +# Monitor dengan hot reload +make watch +``` + +*** + +## ๐ŸŽฏ Next Steps + +### ๐Ÿ“‹ Development Roadmap + +- [ ] โœ… **Setup environment selesai** +- [ ] โœ… **Implementasi retribusi management** +- [ ] โœ… **Setup authentication dengan Keycloak** +- [ ] ๐Ÿ”„ **Integrasi BPJS dan SATUSEHAT** +- [ ] ๐Ÿ”„ **Testing dan validation** +- [ ] ๐Ÿ”„ **Setup monitoring dan logging** +- [ ] ๐Ÿ”„ **Deploy ke production** + +### ๐Ÿš€ Advanced Features + +- **๐Ÿ“Š Real-time Dashboard** +- **๐Ÿ”’ Enhanced Security (Rate limiting, CORS)** +- **๐Ÿ“ˆ Performance Monitoring** +- **๐ŸŒ API Versioning** +- **๐Ÿ“ฑ Mobile SDK Integration** + +*** + +**โšก Total setup time: 5 menit | ๐Ÿ”ง Generate Handler: 30 detik | ๐Ÿงช Testing: Langsung via Swagger** + +> **๐Ÿ’ก Pro Tip:** Gunakan `make help` untuk melihat semua command yang tersedia + +*** + diff --git a/cmd/api/main.go b/cmd/api/main.go new file mode 100644 index 0000000..8f4e5c3 --- /dev/null +++ b/cmd/api/main.go @@ -0,0 +1,86 @@ +package main + +import ( + "context" + "fmt" + "log" + "net/http" + "os/signal" + "syscall" + "time" + + "api-service/internal/server" + + "github.com/joho/godotenv" // Import the godotenv package + + _ "api-service/docs" +) + +// @title API Service +// @version 1.0.0 +// @description A comprehensive Go API service with Swagger documentation +// @termsOfService http://swagger.io/terms/ + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html + +// @host localhost:8080 +// @BasePath /api/v1 +// @schemes http https + +func gracefulShutdown(apiServer *http.Server, done chan bool) { + // Create context that listens for the interrupt signal from the OS. 
+ ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + // Listen for the interrupt signal. + <-ctx.Done() + + log.Println("shutting down gracefully, press Ctrl+C again to force") + stop() // Allow Ctrl+C to force shutdown + + // The context is used to inform the server it has 5 seconds to finish + // the request it is currently handling + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if err := apiServer.Shutdown(ctx); err != nil { + log.Printf("Server forced to shutdown with error: %v", err) + } + + log.Println("Server exiting") + + // Notify the main goroutine that the shutdown is complete + done <- true +} + +func main() { + log.Println("Starting API Service...") + + // Load environment variables from .env file + if err := godotenv.Load(); err != nil { + log.Printf("Warning: .env file not found or could not be loaded: %v", err) + log.Println("Continuing with system environment variables...") + } + + server := server.NewServer() + + // Create a done channel to signal when the shutdown is complete + done := make(chan bool, 1) + + // Run graceful shutdown in a separate goroutine + go gracefulShutdown(server, done) + + log.Printf("Server starting on port %s", server.Addr) + err := server.ListenAndServe() + if err != nil && err != http.ErrServerClosed { + panic(fmt.Sprintf("http server error: %s", err)) + } + + // Wait for the graceful shutdown to complete + <-done + log.Println("Graceful shutdown complete.") +} diff --git a/cmd/logging/main.go b/cmd/logging/main.go new file mode 100644 index 0000000..97774de --- /dev/null +++ b/cmd/logging/main.go @@ -0,0 +1,109 @@ +package main + +import ( + "fmt" + "log" + "time" + + "api-service/pkg/logger" +) + +func main() { + fmt.Println("Testing Dynamic Logging Functions...") + fmt.Println("====================================") + + // Test fungsi penyimpanan log dinamis + testDynamicLogging() + + // Tunggu sebentar untuk 
memastikan goroutine selesai + time.Sleep(500 * time.Millisecond) + + fmt.Println("\n====================================") + fmt.Println("Dynamic logging test completed!") + fmt.Println("Check the log files in pkg/logger/data/ directory") +} + +func testDynamicLogging() { + // Buat logger instance + loggerInstance := logger.New("test-app", logger.DEBUG, false) + + // Test 1: Log dengan penyimpanan otomatis + fmt.Println("\n1. Testing automatic log saving...") + loggerInstance.LogAndSave(logger.INFO, "Application started successfully", map[string]interface{}{ + "version": "1.0.0", + "build_date": time.Now().Format("2006-01-02"), + "environment": "development", + }) + + // Test 2: Log dengan request context + fmt.Println("\n2. Testing log with request context...") + requestLogger := loggerInstance.WithRequestID("req-001").WithCorrelationID("corr-001") + requestLogger.LogAndSave(logger.INFO, "User login attempt", map[string]interface{}{ + "username": "john_doe", + "ip": "192.168.1.100", + "success": true, + }) + + // Test 3: Error logging + fmt.Println("\n3. Testing error logging...") + loggerInstance.LogAndSave(logger.ERROR, "Database connection failed", map[string]interface{}{ + "error": "connection timeout", + "retry_count": 3, + "host": "db.example.com:5432", + }) + + // Test 4: Manual log entry saving + fmt.Println("\n4. 
Testing manual log entry saving...") + manualEntry := logger.LogEntry{ + Timestamp: time.Now().Format(time.RFC3339), + Level: "DEBUG", + Service: "manual-test", + Message: "Manual log entry created", + RequestID: "manual-req-001", + CorrelationID: "manual-corr-001", + File: "main.go", + Line: 42, + Fields: map[string]interface{}{ + "custom_field": "test_value", + "number": 123, + "active": true, + }, + } + + // Simpan manual ke berbagai format + if err := logger.SaveLogText(manualEntry); err != nil { + log.Printf("Error saving text log: %v", err) + } else { + fmt.Println("โœ“ Text log saved successfully") + } + + if err := logger.SaveLogJSON(manualEntry); err != nil { + log.Printf("Error saving JSON log: %v", err) + } else { + fmt.Println("โœ“ JSON log saved successfully") + } + + if err := logger.SaveLogToDatabase(manualEntry); err != nil { + log.Printf("Error saving database log: %v", err) + } else { + fmt.Println("โœ“ Database log saved successfully") + } + + // Test 5: Performance logging dengan durasi + fmt.Println("\n5. 
Testing performance logging...") + start := time.Now() + + // Simulasi proses yang memakan waktu + time.Sleep(200 * time.Millisecond) + + duration := time.Since(start) + loggerInstance.LogAndSave(logger.INFO, "Data processing completed", map[string]interface{}{ + "operation": "data_import", + "duration": duration.String(), + "duration_ms": duration.Milliseconds(), + "records": 1000, + "throughput": fmt.Sprintf("%.2f records/ms", 1000/float64(duration.Milliseconds())), + }) + + fmt.Println("\nโœ“ All logging tests completed successfully!") +} diff --git a/diagnostic/main.go b/diagnostic/main.go new file mode 100644 index 0000000..e1c5a2c --- /dev/null +++ b/diagnostic/main.go @@ -0,0 +1,130 @@ +package main + +import ( + "database/sql" + "fmt" + "log" + "os" + + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/joho/godotenv" +) + +func main() { + fmt.Println("=== Database Connection Diagnostic Tool ===") + + // Load environment variables from .env file + if err := godotenv.Load(); err != nil { + log.Printf("Warning: Error loading .env file: %v", err) + } + + // Get configuration from environment + host := os.Getenv("DB_HOST") + port := os.Getenv("DB_PORT") + username := os.Getenv("DB_USERNAME") + password := os.Getenv("DB_PASSWORD") + database := os.Getenv("DB_DATABASE") + sslmode := os.Getenv("DB_SSLMODE") + + if sslmode == "" { + sslmode = "disable" + } + + fmt.Printf("Host: %s\n", host) + fmt.Printf("Port: %s\n", port) + fmt.Printf("Username: %s\n", username) + fmt.Printf("Database: %s\n", database) + fmt.Printf("SSL Mode: %s\n", sslmode) + + if host == "" || username == "" || password == "" { + fmt.Println("โŒ Missing required environment variables") + return + } + + // Test connection to PostgreSQL server + fmt.Println("\n--- Testing PostgreSQL Server Connection ---") + serverConnStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=postgres sslmode=%s", + host, port, username, password, sslmode) + + db, err := sql.Open("pgx", serverConnStr) + if err 
!= nil { + fmt.Printf("โŒ Failed to connect to PostgreSQL server: %v\n", err) + return + } + defer db.Close() + + err = db.Ping() + if err != nil { + fmt.Printf("โŒ Failed to ping PostgreSQL server: %v\n", err) + return + } + + fmt.Println("โœ… Successfully connected to PostgreSQL server") + + // Check if database exists + fmt.Println("\n--- Checking Database Existence ---") + var exists bool + err = db.QueryRow("SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = $1)", database).Scan(&exists) + if err != nil { + fmt.Printf("โŒ Failed to check database existence: %v\n", err) + return + } + + if !exists { + fmt.Printf("โŒ Database '%s' does not exist\n", database) + + // List available databases + fmt.Println("\n--- Available Databases ---") + rows, err := db.Query("SELECT datname FROM pg_database WHERE datistemplate = false ORDER BY datname") + if err != nil { + fmt.Printf("โŒ Failed to list databases: %v\n", err) + return + } + defer rows.Close() + + fmt.Println("Available databases:") + for rows.Next() { + var dbName string + if err := rows.Scan(&dbName); err != nil { + continue + } + fmt.Printf(" - %s\n", dbName) + } + return + } + + fmt.Printf("โœ… Database '%s' exists\n", database) + + // Test direct connection to the database + fmt.Println("\n--- Testing Direct Database Connection ---") + directConnStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s", + host, port, username, password, database, sslmode) + + targetDB, err := sql.Open("pgx", directConnStr) + if err != nil { + fmt.Printf("โŒ Failed to connect to database '%s': %v\n", database, err) + return + } + defer targetDB.Close() + + err = targetDB.Ping() + if err != nil { + fmt.Printf("โŒ Failed to ping database '%s': %v\n", database, err) + return + } + + fmt.Printf("โœ… Successfully connected to database '%s'\n", database) + + // Test basic query + fmt.Println("\n--- Testing Basic Query ---") + var version string + err = targetDB.QueryRow("SELECT 
version()").Scan(&version) + if err != nil { + fmt.Printf("โŒ Failed to execute query: %v\n", err) + return + } + + fmt.Printf("โœ… PostgreSQL Version: %s\n", version) + + fmt.Println("\n๐ŸŽ‰ All tests passed! Database connection is working correctly.") +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..17b58e9 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,194 @@ +services: + # # PostgreSQL Database + # psql_bp: + # image: postgres:15-alpine + # restart: unless-stopped + # environment: + # POSTGRES_USER: stim + # POSTGRES_PASSWORD: stim*RS54 + # POSTGRES_DB: satu_db + # ports: + # - "5432:5432" + # volumes: + # - postgres_data:/var/lib/postgresql/data + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -U stim -d satu_db"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # # MongoDB Database + # mongodb: + # image: mongo:7-jammy + # restart: unless-stopped + # environment: + # MONGO_INITDB_ROOT_USERNAME: admin + # MONGO_INITDB_ROOT_PASSWORD: stim*rs54 + # ports: + # - "27017:27017" + # volumes: + # - mongodb_data:/data/db + # networks: + # - blueprint + + # # MySQL Antrian Database + # mysql_antrian: + # image: mysql:8.0 + # restart: unless-stopped + # environment: + # MYSQL_ROOT_PASSWORD: www-data + # MYSQL_USER: www-data + # MYSQL_PASSWORD: www-data + # MYSQL_DATABASE: antrian_rssa + # ports: + # - "3306:3306" + # volumes: + # - mysql_antrian_data:/var/lib/mysql + # healthcheck: + # test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # # MySQL Medical Database + # mysql_medical: + # image: mysql:8.0 + # restart: unless-stopped + # environment: + # MYSQL_ROOT_PASSWORD: meninjar*RS54 + # MYSQL_USER: meninjardev + # MYSQL_PASSWORD: meninjar*RS54 + # MYSQL_DATABASE: healtcare_database + # ports: + # - "3307:3306" + # volumes: + # - mysql_medical_data:/var/lib/mysql + # healthcheck: + # test: ["CMD", 
"mysqladmin", "ping", "-h", "localhost"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # Main Application + app: + build: + context: . + dockerfile: Dockerfile + target: prod + restart: unless-stopped + ports: + - "8080:8080" + environment: + # Server Configuration + APP_ENV: production + PORT: 8080 + GIN_MODE: release + + # Default Database Configuration (PostgreSQL) + DB_CONNECTION: postgres + DB_USERNAME: stim + DB_PASSWORD: stim*RS54 + DB_HOST: 10.10.123.165 + DB_DATABASE: satu_db + DB_PORT: 5432 + DB_SSLMODE: disable + + # satudata Database Configuration (PostgreSQL) + POSTGRES_SATUDATA_CONNECTION: postgres + POSTGRES_SATUDATA_USERNAME: stim + POSTGRES_SATUDATA_PASSWORD: stim*RS54 + POSTGRES_SATUDATA_HOST: 10.10.123.165 + POSTGRES_SATUDATA_DATABASE: satu_db + POSTGRES_SATUDATA_PORT: 5432 + POSTGRES_SATUDATA_SSLMODE: disable + + # Mongo Database + MONGODB_MONGOHL7_CONNECTION: mongodb + MONGODB_MONGOHL7_HOST: 10.10.123.206 + MONGODB_MONGOHL7_PORT: 27017 + MONGODB_MONGOHL7_USER: admin + MONGODB_MONGOHL7_PASS: stim*rs54 + MONGODB_MONGOHL7_MASTER: master + MONGODB_MONGOHL7_LOCAL: local + MONGODB_MONGOHL7_SSLMODE: disable + + # MYSQL Antrian Database + # MYSQL_ANTRIAN_CONNECTION: mysql + # MYSQL_ANTRIAN_HOST: mysql_antrian + # MYSQL_ANTRIAN_USERNAME: www-data + # MYSQL_ANTRIAN_PASSWORD: www-data + # MYSQL_ANTRIAN_DATABASE: antrian_rssa + # MYSQL_ANTRIAN_PORT: 3306 + # MYSQL_ANTRIAN_SSLMODE: disable + + # MYSQL Medical Database + MYSQL_MEDICAL_CONNECTION: mysql + MYSQL_MEDICAL_HOST: 10.10.123.163 + MYSQL_MEDICAL_USERNAME: meninjardev + MYSQL_MEDICAL_PASSWORD: meninjar*RS54 + MYSQL_MEDICAL_DATABASE: healtcare_database + MYSQL_MEDICAL_PORT: 3306 + MYSQL_MEDICAL_SSLMODE: disable + + # Keycloak Configuration + KEYCLOAK_ISSUER: https://auth.rssa.top/realms/sandbox + KEYCLOAK_AUDIENCE: nuxtsim-pendaftaran + KEYCLOAK_JWKS_URL: https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs + KEYCLOAK_ENABLED: true + + # BPJS 
Configuration + BPJS_BASEURL: https://apijkn.bpjs-kesehatan.go.id/vclaim-rest + BPJS_CONSID: 5257 + BPJS_USERKEY: 4cf1cbef8c008440bbe9ef9ba789e482 + BPJS_SECRETKEY: 1bV363512D + + # SatuSehat Configuration + BRIDGING_SATUSEHAT_ORG_ID: 100026555 + BRIDGING_SATUSEHAT_FASYAKES_ID: 3573011 + BRIDGING_SATUSEHAT_CLIENT_ID: l1ZgJGW6K5pnrqGUikWM7fgIoquA2AQ5UUG0U8WqHaq2VEyZ + BRIDGING_SATUSEHAT_CLIENT_SECRET: Al3PTYAW6axPiAFwaFlpn8qShLFW5YGMgG8w1qhexgCc7lGTEjjcR6zxa06ThPDy + BRIDGING_SATUSEHAT_AUTH_URL: https://api-satusehat.kemkes.go.id/oauth2/v1 + BRIDGING_SATUSEHAT_BASE_URL: https://api-satusehat.kemkes.go.id/fhir-r4/v1 + BRIDGING_SATUSEHAT_CONSENT_URL: https://api-satusehat.dto.kemkes.go.id/consent/v1 + BRIDGING_SATUSEHAT_KFA_URL: https://api-satusehat.kemkes.go.id/kfa-v2 + + # Swagger Configuration + SWAGGER_TITLE: My Custom API Service + SWAGGER_DESCRIPTION: This is a custom API service for managing various resources + SWAGGER_VERSION: 2.0.0 + SWAGGER_CONTACT_NAME: Support Team + SWAGGER_HOST: api.mycompany.com:8080 + SWAGGER_BASE_PATH: /api/v2 + SWAGGER_SCHEMES: https + + # API Configuration + API_TITLE: API Service UJICOBA + API_DESCRIPTION: Dokumentation SWAGGER + API_VERSION: 3.0.0 + + # depends_on: + # psql_bp: + # condition: service_healthy + # mongodb: + # condition: service_started + # mysql_antrian: + # condition: service_healthy + # mysql_medical: + # condition: service_healthy + networks: + - goservice + +# volumes: +# postgres_data: +# mongodb_data: +# mysql_antrian_data: +# mysql_medical_data: + +networks: + goservice: diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000..768acfe --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,1169 @@ +// Code generated by swaggo/swag. DO NOT EDIT. 
+ +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/api/v1/auth/login": { + "post": { + "description": "Authenticate user with username and password to receive JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Login user and get JWT token", + "parameters": [ + { + "description": "Login credentials", + "name": "login", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + 
}, + "/api/v1/auth/refresh": { + "post": { + "description": "Refresh the JWT token using a valid refresh token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh JWT token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Register a new user account", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Register new user", + "parameters": [ + { + "description": "Registration data", + "name": "register", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/retribusi/{id}": { + "get": { + "description": "Returns a single retribusi by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get Retribusi by ID", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": 
"path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "put": { + "description": "Updates an existing retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Update retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Retribusi update request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "Retribusi updated successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "delete": { + "description": "Soft deletes a retribusi by setting status to 'deleted'", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Delete retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": 
"path", + "required": true + } + ], + "responses": { + "200": { + "description": "Retribusi deleted successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiDeleteResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis": { + "get": { + "description": "Returns a paginated list of retribusis with optional summary statistics", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with pagination and optional aggregation", + "parameters": [ + { + "type": "integer", + "default": 10, + "description": "Limit (max 100)", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + }, + { + "type": "boolean", + "default": false, + "description": "Include aggregation summary", + "name": "include_summary", + "in": "query" + }, + { + "type": "string", + "description": "Filter by status", + "name": "status", + "in": "query" + }, + { + "type": "string", + "description": "Filter by jenis", + "name": "jenis", + "in": "query" + }, + { + "type": "string", + "description": "Filter by dinas", + "name": "dinas", + "in": "query" + }, + { + "type": "string", + "description": "Search in multiple fields", + "name": "search", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server 
error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "post": { + "description": "Creates a new retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Create retribusi", + "parameters": [ + { + "description": "Retribusi creation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateRequest" + } + } + ], + "responses": { + "201": { + "description": "Retribusi created successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/dynamic": { + "get": { + "description": "Returns retribusis with advanced dynamic filtering like Directus", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with dynamic filtering", + "parameters": [ + { + "type": "string", + "description": "Fields to select (e.g., fields=*.*)", + "name": "fields", + "in": "query" + }, + { + "type": "string", + "description": "Dynamic filters (e.g., filter[Jenis][_eq]=value)", + "name": "filter[column][operator]", + "in": "query" + }, + { + "type": "string", + "description": "Sort fields (e.g., sort=date_created,-Jenis)", + "name": "sort", + "in": "query" + }, + { + "type": "integer", + "default": 10, + "description": "Limit", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": 
"#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/stats": { + "get": { + "description": "Returns comprehensive statistics about retribusi data", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi statistics", + "parameters": [ + { + "type": "string", + "description": "Filter statistics by status", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Statistics data", + "schema": { + "$ref": "#/definitions/models.AggregateData" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/token/generate": { + "post": { + "description": "Generate a JWT token for a user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate JWT token", + "parameters": [ + { + "description": "User credentials", + "name": "token", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/token/generate-direct": { + "post": { + "description": "Generate a JWT token directly without password verification (for testing)", + "consumes": [ + "application/json" + 
], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate token directly", + "parameters": [ + { + "description": "User info", + "name": "user", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "models.AggregateData": { + "type": "object", + "properties": { + "by_dinas": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_jenis": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_status": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "created_today": { + "type": "integer" + }, + "last_updated": { + "type": "string" + }, + "total_active": { + "type": "integer" + }, + "total_draft": { + "type": "integer" + }, + "total_inactive": { + "type": "integer" + }, + "updated_today": { + "type": "integer" + } + } + }, + "models.ErrorResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer" + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + } + }, + "models.LoginRequest": { + "type": "object", + "required": [ + "password", + "username" + ], + "properties": { + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "models.MetaResponse": { + "type": "object", + "properties": { + "current_page": { + "type": "integer" + }, + "has_next": { + "type": "boolean" + }, + "has_prev": { + "type": "boolean" + }, + "limit": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "total": { + "type": "integer" + }, + "total_pages": { + "type": 
"integer" + } + } + }, + "models.NullableInt32": { + "type": "object", + "properties": { + "int32": { + "type": "integer" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableTime": { + "type": "object", + "properties": { + "time": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.TokenResponse": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "expires_in": { + "type": "integer" + }, + "token_type": { + "type": "string" + } + } + }, + "models.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "retribusi.Retribusi": { + "type": "object", + "properties": { + "date_created": { + "$ref": "#/definitions/models.NullableTime" + }, + "date_updated": { + "$ref": "#/definitions/models.NullableTime" + }, + "dinas": { + "$ref": "#/definitions/models.NullableString" + }, + "id": { + "type": "string" + }, + "jenis": { + "$ref": "#/definitions/models.NullableString" + }, + "kelompok_obyek": { + "$ref": "#/definitions/models.NullableString" + }, + "kode_tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "pelayanan": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_denda": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_pokok": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "sort": { + "$ref": "#/definitions/models.NullableInt32" + }, + "status": { + "type": "string" + }, + "tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "tarif_overtime": { + "$ref": 
"#/definitions/models.NullableString" + }, + "uraian_1": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_2": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_3": { + "$ref": "#/definitions/models.NullableString" + }, + "user_created": { + "$ref": "#/definitions/models.NullableString" + }, + "user_updated": { + "$ref": "#/definitions/models.NullableString" + } + } + }, + "retribusi.RetribusiCreateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiCreateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetByIDResponse": { + "type": "object", + "properties": { + "data": { + "$ref": 
"#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/definitions/retribusi.Retribusi" + } + }, + "message": { + "type": "string" + }, + "meta": { + "$ref": "#/definitions/models.MetaResponse" + }, + "summary": { + "$ref": "#/definitions/models.AggregateData" + } + } + }, + "retribusi.RetribusiUpdateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiUpdateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0.0", + Host: "localhost:8080", + BasePath: "/api/v1", + Schemes: []string{"http", "https"}, + Title: "API 
Service", + Description: "A comprehensive Go API service with Swagger documentation", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/docs/swagger.json b/docs/swagger.json new file mode 100644 index 0000000..ff74f9c --- /dev/null +++ b/docs/swagger.json @@ -0,0 +1,1150 @@ +{ + "schemes": [ + "http", + "https" + ], + "swagger": "2.0", + "info": { + "description": "A comprehensive Go API service with Swagger documentation", + "title": "API Service", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "1.0.0" + }, + "host": "localhost:8080", + "basePath": "/api/v1", + "paths": { + "/api/v1/auth/login": { + "post": { + "description": "Authenticate user with username and password to receive JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Login user and get JWT token", + "parameters": [ + { + "description": "Login credentials", + "name": "login", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + 
], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/refresh": { + "post": { + "description": "Refresh the JWT token using a valid refresh token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh JWT token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Register a new user account", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Register new user", + "parameters": [ + { + "description": "Registration data", + "name": "register", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + 
"/api/v1/retribusi/{id}": { + "get": { + "description": "Returns a single retribusi by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get Retribusi by ID", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "put": { + "description": "Updates an existing retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Update retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Retribusi update request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "Retribusi updated successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + 
"delete": { + "description": "Soft deletes a retribusi by setting status to 'deleted'", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Delete retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Retribusi deleted successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiDeleteResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis": { + "get": { + "description": "Returns a paginated list of retribusis with optional summary statistics", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with pagination and optional aggregation", + "parameters": [ + { + "type": "integer", + "default": 10, + "description": "Limit (max 100)", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + }, + { + "type": "boolean", + "default": false, + "description": "Include aggregation summary", + "name": "include_summary", + "in": "query" + }, + { + "type": "string", + "description": "Filter by status", + "name": "status", + "in": "query" + }, + { + "type": "string", + "description": "Filter by jenis", + "name": "jenis", + "in": "query" + }, + { + "type": "string", + "description": "Filter by dinas", + "name": "dinas", + "in": "query" + }, + { + "type": "string", + "description": "Search in multiple fields", + "name": 
"search", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "post": { + "description": "Creates a new retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Create retribusi", + "parameters": [ + { + "description": "Retribusi creation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateRequest" + } + } + ], + "responses": { + "201": { + "description": "Retribusi created successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/dynamic": { + "get": { + "description": "Returns retribusis with advanced dynamic filtering like Directus", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with dynamic filtering", + "parameters": [ + { + "type": "string", + "description": "Fields to select (e.g., fields=*.*)", + "name": "fields", + "in": "query" + }, + { + "type": "string", + "description": "Dynamic filters (e.g., filter[Jenis][_eq]=value)", + "name": "filter[column][operator]", + "in": "query" + }, + { + "type": "string", + "description": "Sort fields (e.g., sort=date_created,-Jenis)", + "name": "sort", + "in": "query" + }, + { + 
"type": "integer", + "default": 10, + "description": "Limit", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/stats": { + "get": { + "description": "Returns comprehensive statistics about retribusi data", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi statistics", + "parameters": [ + { + "type": "string", + "description": "Filter statistics by status", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Statistics data", + "schema": { + "$ref": "#/definitions/models.AggregateData" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/token/generate": { + "post": { + "description": "Generate a JWT token for a user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate JWT token", + "parameters": [ + { + "description": "User credentials", + "name": "token", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": 
"Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/token/generate-direct": { + "post": { + "description": "Generate a JWT token directly without password verification (for testing)", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate token directly", + "parameters": [ + { + "description": "User info", + "name": "user", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "models.AggregateData": { + "type": "object", + "properties": { + "by_dinas": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_jenis": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_status": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "created_today": { + "type": "integer" + }, + "last_updated": { + "type": "string" + }, + "total_active": { + "type": "integer" + }, + "total_draft": { + "type": "integer" + }, + "total_inactive": { + "type": "integer" + }, + "updated_today": { + "type": "integer" + } + } + }, + "models.ErrorResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer" + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + } + }, + "models.LoginRequest": { + "type": "object", + "required": [ + "password", + "username" + ], + "properties": { + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "models.MetaResponse": { + 
"type": "object", + "properties": { + "current_page": { + "type": "integer" + }, + "has_next": { + "type": "boolean" + }, + "has_prev": { + "type": "boolean" + }, + "limit": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "total": { + "type": "integer" + }, + "total_pages": { + "type": "integer" + } + } + }, + "models.NullableInt32": { + "type": "object", + "properties": { + "int32": { + "type": "integer" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableTime": { + "type": "object", + "properties": { + "time": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.TokenResponse": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "expires_in": { + "type": "integer" + }, + "token_type": { + "type": "string" + } + } + }, + "models.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "retribusi.Retribusi": { + "type": "object", + "properties": { + "date_created": { + "$ref": "#/definitions/models.NullableTime" + }, + "date_updated": { + "$ref": "#/definitions/models.NullableTime" + }, + "dinas": { + "$ref": "#/definitions/models.NullableString" + }, + "id": { + "type": "string" + }, + "jenis": { + "$ref": "#/definitions/models.NullableString" + }, + "kelompok_obyek": { + "$ref": "#/definitions/models.NullableString" + }, + "kode_tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "pelayanan": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_denda": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_pokok": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan": { + "$ref": "#/definitions/models.NullableString" + }, 
+ "satuan_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "sort": { + "$ref": "#/definitions/models.NullableInt32" + }, + "status": { + "type": "string" + }, + "tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "tarif_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_1": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_2": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_3": { + "$ref": "#/definitions/models.NullableString" + }, + "user_created": { + "$ref": "#/definitions/models.NullableString" + }, + "user_updated": { + "$ref": "#/definitions/models.NullableString" + } + } + }, + "retribusi.RetribusiCreateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiCreateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + 
}, + "retribusi.RetribusiDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetByIDResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/definitions/retribusi.Retribusi" + } + }, + "message": { + "type": "string" + }, + "meta": { + "$ref": "#/definitions/models.MetaResponse" + }, + "summary": { + "$ref": "#/definitions/models.AggregateData" + } + } + }, + "retribusi.RetribusiUpdateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiUpdateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + 
"type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml new file mode 100644 index 0000000..677284f --- /dev/null +++ b/docs/swagger.yaml @@ -0,0 +1,771 @@ +basePath: /api/v1 +definitions: + models.AggregateData: + properties: + by_dinas: + additionalProperties: + type: integer + type: object + by_jenis: + additionalProperties: + type: integer + type: object + by_status: + additionalProperties: + type: integer + type: object + created_today: + type: integer + last_updated: + type: string + total_active: + type: integer + total_draft: + type: integer + total_inactive: + type: integer + updated_today: + type: integer + type: object + models.ErrorResponse: + properties: + code: + type: integer + error: + type: string + message: + type: string + timestamp: + type: string + type: object + models.LoginRequest: + properties: + password: + type: string + username: + type: string + required: + - password + - username + type: object + models.MetaResponse: + properties: + current_page: + type: integer + has_next: + type: boolean + has_prev: + type: boolean + limit: + type: integer + offset: + type: integer + total: + type: integer + total_pages: + type: integer + type: object + models.NullableInt32: + properties: + int32: + type: integer + valid: + type: boolean + type: object + models.NullableString: + properties: + string: + type: string + valid: + type: boolean + type: object + models.NullableTime: + properties: + time: + type: string + valid: + type: boolean + type: object + models.TokenResponse: + properties: + access_token: + type: string + expires_in: + type: integer + token_type: + type: string + type: object + models.User: + properties: + email: + type: string + id: + type: string + role: + type: string + username: + type: string + type: object + retribusi.Retribusi: + properties: + date_created: + $ref: '#/definitions/models.NullableTime' + date_updated: + $ref: '#/definitions/models.NullableTime' + dinas: + 
$ref: '#/definitions/models.NullableString' + id: + type: string + jenis: + $ref: '#/definitions/models.NullableString' + kelompok_obyek: + $ref: '#/definitions/models.NullableString' + kode_tarif: + $ref: '#/definitions/models.NullableString' + pelayanan: + $ref: '#/definitions/models.NullableString' + rekening_denda: + $ref: '#/definitions/models.NullableString' + rekening_pokok: + $ref: '#/definitions/models.NullableString' + satuan: + $ref: '#/definitions/models.NullableString' + satuan_overtime: + $ref: '#/definitions/models.NullableString' + sort: + $ref: '#/definitions/models.NullableInt32' + status: + type: string + tarif: + $ref: '#/definitions/models.NullableString' + tarif_overtime: + $ref: '#/definitions/models.NullableString' + uraian_1: + $ref: '#/definitions/models.NullableString' + uraian_2: + $ref: '#/definitions/models.NullableString' + uraian_3: + $ref: '#/definitions/models.NullableString' + user_created: + $ref: '#/definitions/models.NullableString' + user_updated: + $ref: '#/definitions/models.NullableString' + type: object + retribusi.RetribusiCreateRequest: + properties: + dinas: + maxLength: 255 + minLength: 1 + type: string + jenis: + maxLength: 255 + minLength: 1 + type: string + kelompok_obyek: + maxLength: 255 + minLength: 1 + type: string + kode_tarif: + maxLength: 255 + minLength: 1 + type: string + pelayanan: + maxLength: 255 + minLength: 1 + type: string + rekening_denda: + maxLength: 255 + minLength: 1 + type: string + rekening_pokok: + maxLength: 255 + minLength: 1 + type: string + satuan: + maxLength: 255 + minLength: 1 + type: string + satuan_overtime: + maxLength: 255 + minLength: 1 + type: string + status: + enum: + - draft + - active + - inactive + type: string + tarif: + type: string + tarif_overtime: + type: string + uraian_1: + type: string + uraian_2: + type: string + uraian_3: + type: string + required: + - status + type: object + retribusi.RetribusiCreateResponse: + properties: + data: + $ref: 
'#/definitions/retribusi.Retribusi' + message: + type: string + type: object + retribusi.RetribusiDeleteResponse: + properties: + id: + type: string + message: + type: string + type: object + retribusi.RetribusiGetByIDResponse: + properties: + data: + $ref: '#/definitions/retribusi.Retribusi' + message: + type: string + type: object + retribusi.RetribusiGetResponse: + properties: + data: + items: + $ref: '#/definitions/retribusi.Retribusi' + type: array + message: + type: string + meta: + $ref: '#/definitions/models.MetaResponse' + summary: + $ref: '#/definitions/models.AggregateData' + type: object + retribusi.RetribusiUpdateRequest: + properties: + dinas: + maxLength: 255 + minLength: 1 + type: string + jenis: + maxLength: 255 + minLength: 1 + type: string + kelompok_obyek: + maxLength: 255 + minLength: 1 + type: string + kode_tarif: + maxLength: 255 + minLength: 1 + type: string + pelayanan: + maxLength: 255 + minLength: 1 + type: string + rekening_denda: + maxLength: 255 + minLength: 1 + type: string + rekening_pokok: + maxLength: 255 + minLength: 1 + type: string + satuan: + maxLength: 255 + minLength: 1 + type: string + satuan_overtime: + maxLength: 255 + minLength: 1 + type: string + status: + enum: + - draft + - active + - inactive + type: string + tarif: + type: string + tarif_overtime: + type: string + uraian_1: + type: string + uraian_2: + type: string + uraian_3: + type: string + required: + - status + type: object + retribusi.RetribusiUpdateResponse: + properties: + data: + $ref: '#/definitions/retribusi.Retribusi' + message: + type: string + type: object +host: localhost:8080 +info: + contact: + email: support@swagger.io + name: API Support + url: http://www.swagger.io/support + description: A comprehensive Go API service with Swagger documentation + license: + name: Apache 2.0 + url: http://www.apache.org/licenses/LICENSE-2.0.html + termsOfService: http://swagger.io/terms/ + title: API Service + version: 1.0.0 +paths: + /api/v1/auth/login: + post: + 
consumes: + - application/json + description: Authenticate user with username and password to receive JWT token + parameters: + - description: Login credentials + in: body + name: login + required: true + schema: + $ref: '#/definitions/models.LoginRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + summary: Login user and get JWT token + tags: + - Authentication + /api/v1/auth/me: + get: + description: Get information about the currently authenticated user + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.User' + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + security: + - Bearer: [] + summary: Get current user info + tags: + - Authentication + /api/v1/auth/refresh: + post: + consumes: + - application/json + description: Refresh the JWT token using a valid refresh token + parameters: + - description: Refresh token + in: body + name: refresh + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + summary: Refresh JWT token + tags: + - Authentication + /api/v1/auth/register: + post: + consumes: + - application/json + description: Register a new user account + parameters: + - description: Registration data + in: body + name: register + required: true + schema: + additionalProperties: + type: string + type: object + 
produces: + - application/json + responses: + "201": + description: Created + schema: + additionalProperties: + type: string + type: object + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + summary: Register new user + tags: + - Authentication + /api/v1/retribusi/{id}: + delete: + consumes: + - application/json + description: Soft deletes a retribusi by setting status to 'deleted' + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: Retribusi deleted successfully + schema: + $ref: '#/definitions/retribusi.RetribusiDeleteResponse' + "400": + description: Invalid ID format + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Delete retribusi + tags: + - Retribusi + get: + consumes: + - application/json + description: Returns a single retribusi by ID + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetByIDResponse' + "400": + description: Invalid ID format + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get Retribusi by ID + tags: + - Retribusi + put: + consumes: + - application/json + description: Updates an existing retribusi record + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + - description: Retribusi update request + 
in: body + name: request + required: true + schema: + $ref: '#/definitions/retribusi.RetribusiUpdateRequest' + produces: + - application/json + responses: + "200": + description: Retribusi updated successfully + schema: + $ref: '#/definitions/retribusi.RetribusiUpdateResponse' + "400": + description: Bad request or validation error + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Update retribusi + tags: + - Retribusi + /api/v1/retribusis: + get: + consumes: + - application/json + description: Returns a paginated list of retribusis with optional summary statistics + parameters: + - default: 10 + description: Limit (max 100) + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer + - default: false + description: Include aggregation summary + in: query + name: include_summary + type: boolean + - description: Filter by status + in: query + name: status + type: string + - description: Filter by jenis + in: query + name: jenis + type: string + - description: Filter by dinas + in: query + name: dinas + type: string + - description: Search in multiple fields + in: query + name: search + type: string + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetResponse' + "400": + description: Bad request + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi with pagination and optional aggregation + tags: + - Retribusi + post: + consumes: + - application/json + description: Creates a new retribusi record + parameters: + - description: Retribusi creation request + in: body + name: request + 
required: true + schema: + $ref: '#/definitions/retribusi.RetribusiCreateRequest' + produces: + - application/json + responses: + "201": + description: Retribusi created successfully + schema: + $ref: '#/definitions/retribusi.RetribusiCreateResponse' + "400": + description: Bad request or validation error + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Create retribusi + tags: + - Retribusi + /api/v1/retribusis/dynamic: + get: + consumes: + - application/json + description: Returns retribusis with advanced dynamic filtering like Directus + parameters: + - description: Fields to select (e.g., fields=*.*) + in: query + name: fields + type: string + - description: Dynamic filters (e.g., filter[Jenis][_eq]=value) + in: query + name: filter[column][operator] + type: string + - description: Sort fields (e.g., sort=date_created,-Jenis) + in: query + name: sort + type: string + - default: 10 + description: Limit + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetResponse' + "400": + description: Bad request + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi with dynamic filtering + tags: + - Retribusi + /api/v1/retribusis/stats: + get: + consumes: + - application/json + description: Returns comprehensive statistics about retribusi data + parameters: + - description: Filter statistics by status + in: query + name: status + type: string + produces: + - application/json + responses: + "200": + description: Statistics data + schema: + $ref: '#/definitions/models.AggregateData' + "500": + description: Internal server error + 
schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi statistics + tags: + - Retribusi + /api/v1/token/generate: + post: + consumes: + - application/json + description: Generate a JWT token for a user + parameters: + - description: User credentials + in: body + name: token + required: true + schema: + $ref: '#/definitions/models.LoginRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + summary: Generate JWT token + tags: + - Token + /api/v1/token/generate-direct: + post: + consumes: + - application/json + description: Generate a JWT token directly without password verification (for + testing) + parameters: + - description: User info + in: body + name: user + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + summary: Generate token directly + tags: + - Token +schemes: +- http +- https +swagger: "2.0" diff --git a/example.env b/example.env new file mode 100644 index 0000000..6c3222f --- /dev/null +++ b/example.env @@ -0,0 +1,92 @@ +# Server Configuration +PORT=8080 +GIN_MODE=debug + +# Default Database Configuration (PostgreSQL) +DB_CONNECTION=postgres +DB_USERNAME=stim +DB_PASSWORD=stim*RS54 +DB_HOST=10.10.123.165 +DB_DATABASE=satu_db +DB_PORT=5000 +DB_SSLMODE=disable + +# satudata Database Configuration (PostgreSQL) +# POSTGRES_CONNECTION=postgres +# POSTGRES_USERNAME=stim +# POSTGRES_PASSWORD=stim*RS54 +# POSTGRES_HOST=10.10.123.165 +# POSTGRES_DATABASE=satu_db +# POSTGRES_NAME=satu_db +# 
POSTGRES_PORT=5000 +# POSTGRES_SSLMODE=disable + + +POSTGRES_SATUDATA_CONNECTION=postgres +POSTGRES_SATUDATA_USERNAME=stim +POSTGRES_SATUDATA_PASSWORD=stim*RS54 +POSTGRES_SATUDATA_HOST=10.10.123.165 +POSTGRES_SATUDATA_DATABASE=satu_db +POSTGRES_SATUDATA_PORT=5000 +POSTGRES_SATUDATA_SSLMODE=disable + +# Mongo Database +MONGODB_MONGOHL7_CONNECTION=mongodb +MONGODB_MONGOHL7_HOST=10.10.123.206 +MONGODB_MONGOHL7_PORT=27017 +MONGODB_MONGOHL7_USER=admin +MONGODB_MONGOHL7_PASS=stim*rs54 +MONGODB_MONGOHL7_MASTER=master +MONGODB_MONGOHL7_LOCAL=local +MONGODB_MONGOHL7_SSLMODE=disable + +# MYSQL Antrian Database +MYSQL_ANTRIAN_CONNECTION=mysql +MYSQL_ANTRIAN_HOST=10.10.123.163 +MYSQL_ANTRIAN_USERNAME=www-data +MYSQL_ANTRIAN_PASSWORD=www-data +MYSQL_ANTRIAN_DATABASE=antrian_rssa +MYSQL_ANTRIAN_PORT=3306 +MYSQL_ANTRIAN_SSLMODE=disable + + +MYSQL_MEDICAL_CONNECTION=mysql +MYSQL_MEDICAL_HOST=10.10.123.147 +MYSQL_MEDICAL_USERNAME=meninjardev +MYSQL_MEDICAL_PASSWORD=meninjar*RS54 +MYSQL_MEDICAL_DATABASE=healtcare_database +MYSQL_MEDICAL_PORT=3306 +MYSQL_MEDICAL_SSLMODE=disable + +# Keycloak Configuration (optional) +KEYCLOAK_ISSUER=https://auth.rssa.top/realms/sandbox +KEYCLOAK_AUDIENCE=nuxtsim-pendaftaran +KEYCLOAK_JWKS_URL=https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs +KEYCLOAK_ENABLED=true + +# BPJS Configuration +BPJS_BASEURL=https://apijkn.bpjs-kesehatan.go.id/vclaim-rest +BPJS_CONSID=5257 +BPJS_USERKEY=4cf1cbef8c008440bbe9ef9ba789e482 +BPJS_SECRETKEY=1bV363512D + +BRIDGING_SATUSEHAT_ORG_ID=100026555 +BRIDGING_SATUSEHAT_FASYAKES_ID=3573011 +BRIDGING_SATUSEHAT_CLIENT_ID=l1ZgJGW6K5pnrqGUikWM7fgIoquA2AQ5UUG0U8WqHaq2VEyZ +BRIDGING_SATUSEHAT_CLIENT_SECRET=Al3PTYAW6axPiAFwaFlpn8qShLFW5YGMgG8w1qhexgCc7lGTEjjcR6zxa06ThPDy +BRIDGING_SATUSEHAT_AUTH_URL=https://api-satusehat.kemkes.go.id/oauth2/v1 +BRIDGING_SATUSEHAT_BASE_URL=https://api-satusehat.kemkes.go.id/fhir-r4/v1 +BRIDGING_SATUSEHAT_CONSENT_URL=https://api-satusehat.dto.kemkes.go.id/consent/v1 
+BRIDGING_SATUSEHAT_KFA_URL=https://api-satusehat.kemkes.go.id/kfa-v2 + +SWAGGER_TITLE=My Custom API Service +SWAGGER_DESCRIPTION=This is a custom API service for managing various resources +SWAGGER_VERSION=2.0.0 +SWAGGER_CONTACT_NAME=STIM IT Support +SWAGGER_HOST=api.mycompany.com:8080 +SWAGGER_BASE_PATH=/api/v2 +SWAGGER_SCHEMES=https + +API_TITLE=API Service UJICOBA +API_DESCRIPTION=Dokumentation SWAGGER +API_VERSION=3.0.0 diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..5dd4cb6 --- /dev/null +++ b/go.mod @@ -0,0 +1,90 @@ +module api-service + +go 1.24.4 + +require ( + github.com/gin-gonic/gin v1.10.1 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/google/uuid v1.6.0 + github.com/gorilla/websocket v1.5.1 + github.com/jackc/pgx/v5 v5.7.2 // Ensure pgx is a direct dependency + go.mongodb.org/mongo-driver v1.17.3 + golang.org/x/crypto v0.41.0 + golang.org/x/sync v0.16.0 + gorm.io/driver/mysql v1.6.0 // GORM MySQL driver + gorm.io/driver/postgres v1.5.11 // Added GORM PostgreSQL driver + gorm.io/driver/sqlserver v1.6.1 // GORM SQL Server driver +) + +require ( + github.com/daku10/go-lz-string v0.0.6 + github.com/go-playground/validator/v10 v10.27.0 + github.com/go-sql-driver/mysql v1.8.1 + github.com/joho/godotenv v1.5.1 + github.com/lib/pq v1.10.9 + github.com/mashingan/smapping v0.1.19 + github.com/rs/zerolog v1.34.0 + github.com/swaggo/files v1.0.1 + github.com/swaggo/gin-swagger v1.6.0 + github.com/swaggo/swag v1.16.6 + github.com/tidwall/gjson v1.18.0 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + filippo.io/edwards25519 v1.1.0 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/bytedance/sonic v1.14.0 // indirect + github.com/bytedance/sonic/loader v0.3.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/gabriel-vasile/mimetype v1.4.9 // indirect + 
github.com/gin-contrib/sse v1.1.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/swag v0.19.15 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect + github.com/golang-sql/sqlexp v0.1.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.6 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/microsoft/go-mssqldb v1.8.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.0 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.0 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 
v0.0.0-20240726163527-a2c0da244d78 // indirect + golang.org/x/arch v0.20.0 // indirect + golang.org/x/mod v0.26.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect + golang.org/x/tools v0.35.0 // indirect + google.golang.org/protobuf v1.36.7 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gorm.io/gorm v1.30.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..7ecaabb --- /dev/null +++ b/go.sum @@ -0,0 +1,361 @@ +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 h1:E+OJmp2tPvt1W+amx48v1eqbjDYsgN+RzP4q16yV5eM= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1/go.mod h1:a6xsAQUZg+VsS3TJ05SRp524Hs4pZ/AeFSr5ENf0Yjo= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 h1:U2rTu3Ef+7w9FHKIAXM6ZyqF3UOWJZ12zIm8zECAFfg= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 h1:jBQA3cKT4L2rWMpgE7Yt3Hwh2aUj8KXjIGLxjHeYNNo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1 
h1:MyVTgWR8qd/Jw1Le0NZebGBUCLbtak3bJ3z1OlqZBpw= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= +github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= +github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= +github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod 
h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/daku10/go-lz-string v0.0.6 h1:aO8FFp4QPuNp7+WNyh1DyNjGF3UbZu95tUv9xOZNsYQ= +github.com/daku10/go-lz-string v0.0.6/go.mod h1:Vk++rSG3db8HXJaHEAbxiy/ukjTmPBw/iI+SrVZDzfs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= +github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= +github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/spec v0.20.4 
h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= +github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= 
+github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= +github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= 
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI= +github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod 
h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= 
+github.com/mashingan/smapping v0.1.19 h1:SsEtuPn2UcM1croIupPtGLgWgpYRuS0rSQMvKD9g2BQ= +github.com/mashingan/smapping v0.1.19/go.mod h1:FjfiwFxGOuNxL/OT1WcrNAwTPx0YJeg5JiXwBB1nyig= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/microsoft/go-mssqldb v1.8.2 h1:236sewazvC8FvG6Dr3bszrVhMkAl4KYImryLkRMCd0I= +github.com/microsoft/go-mssqldb v1.8.2/go.mod h1:vp38dT33FGfVotRiTmDo3bFyaHq+p3LektQrjTULowo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pelletier/go-toml/v2 v2.2.4 
h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod 
h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= +github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M= +github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo= +github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI= +github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= 
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= +github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= +go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= +golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= 
+golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod 
h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys 
v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry 
v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.11.0/go.mod 
h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 
v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= +gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314= +gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI= +gorm.io/driver/sqlserver v1.6.1 h1:XWISFsu2I2pqd1KJhhTZNJMx1jNQ+zVL/Q8ovDcUjtY= +gorm.io/driver/sqlserver v1.6.1/go.mod h1:VZeNn7hqX1aXoN5TPAFGWvxWG90xtA8erGn2gQmpc6U= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..f34deb4 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,739 @@ +package config + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "fmt" + "log" 
+ "os" + "strconv" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) + +type Config struct { + Server ServerConfig + Databases map[string]DatabaseConfig + ReadReplicas map[string][]DatabaseConfig // For read replicas + Keycloak KeycloakConfig + Bpjs BpjsConfig + SatuSehat SatuSehatConfig + Swagger SwaggerConfig + Validator *validator.Validate +} + +type SwaggerConfig struct { + Title string + Description string + Version string + TermsOfService string + ContactName string + ContactURL string + ContactEmail string + LicenseName string + LicenseURL string + Host string + BasePath string + Schemes []string +} + +type ServerConfig struct { + Port int + Mode string +} + +type DatabaseConfig struct { + Name string + Type string // postgres, mysql, sqlserver, sqlite, mongodb + Host string + Port int + Username string + Password string + Database string + Schema string + SSLMode string + Path string // For SQLite + Options string // Additional connection options + MaxOpenConns int // Max open connections + MaxIdleConns int // Max idle connections + ConnMaxLifetime time.Duration // Connection max lifetime +} + +type KeycloakConfig struct { + Issuer string + Audience string + JwksURL string + Enabled bool +} + +type BpjsConfig struct { + BaseURL string `json:"base_url"` + ConsID string `json:"cons_id"` + UserKey string `json:"user_key"` + SecretKey string `json:"secret_key"` + Timeout time.Duration `json:"timeout"` +} + +type SatuSehatConfig struct { + OrgID string `json:"org_id"` + FasyakesID string `json:"fasyakes_id"` + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + AuthURL string `json:"auth_url"` + BaseURL string `json:"base_url"` + ConsentURL string `json:"consent_url"` + KFAURL string `json:"kfa_url"` + Timeout time.Duration `json:"timeout"` +} + +// SetHeader generates required headers for BPJS VClaim API +// func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { +// timenow := 
time.Now().UTC() +// t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z") +// if err != nil { +// log.Fatal(err) +// } + +// tstamp := timenow.Unix() - t.Unix() +// secret := []byte(cfg.SecretKey) +// message := []byte(cfg.ConsID + "&" + fmt.Sprint(tstamp)) +// hash := hmac.New(sha256.New, secret) +// hash.Write(message) + +// // to lowercase hexits +// hex.EncodeToString(hash.Sum(nil)) +// // to base64 +// xSignature := base64.StdEncoding.EncodeToString(hash.Sum(nil)) + +// return cfg.ConsID, cfg.SecretKey, cfg.UserKey, fmt.Sprint(tstamp), xSignature +// } +func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { + timenow := time.Now().UTC() + t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z") + if err != nil { + log.Fatal(err) + } + + tstamp := timenow.Unix() - t.Unix() + secret := []byte(cfg.SecretKey) + message := []byte(cfg.ConsID + "&" + fmt.Sprint(tstamp)) + hash := hmac.New(sha256.New, secret) + hash.Write(message) + + // to lowercase hexits + hex.EncodeToString(hash.Sum(nil)) + // to base64 + xSignature := base64.StdEncoding.EncodeToString(hash.Sum(nil)) + + return cfg.ConsID, cfg.SecretKey, cfg.UserKey, fmt.Sprint(tstamp), xSignature +} + +type ConfigBpjs struct { + Cons_id string + Secret_key string + User_key string +} + +// SetHeader for backward compatibility +func (cfg ConfigBpjs) SetHeader() (string, string, string, string, string) { + bpjsConfig := BpjsConfig{ + ConsID: cfg.Cons_id, + SecretKey: cfg.Secret_key, + UserKey: cfg.User_key, + } + return bpjsConfig.SetHeader() +} + +func LoadConfig() *Config { + config := &Config{ + Server: ServerConfig{ + Port: getEnvAsInt("PORT", 8080), + Mode: getEnv("GIN_MODE", "debug"), + }, + Databases: make(map[string]DatabaseConfig), + ReadReplicas: make(map[string][]DatabaseConfig), + Keycloak: KeycloakConfig{ + Issuer: getEnv("KEYCLOAK_ISSUER", "https://keycloak.example.com/auth/realms/yourrealm"), + Audience: getEnv("KEYCLOAK_AUDIENCE", "your-client-id"), + JwksURL: 
getEnv("KEYCLOAK_JWKS_URL", "https://keycloak.example.com/auth/realms/yourrealm/protocol/openid-connect/certs"),
			Enabled: getEnvAsBool("KEYCLOAK_ENABLED", true),
		},
		// BPJS VClaim credentials; ConsID/UserKey/SecretKey intentionally
		// default to empty and are checked later by Validate.
		Bpjs: BpjsConfig{
			BaseURL:   getEnv("BPJS_BASEURL", "https://apijkn.bpjs-kesehatan.go.id"),
			ConsID:    getEnv("BPJS_CONSID", ""),
			UserKey:   getEnv("BPJS_USERKEY", ""),
			SecretKey: getEnv("BPJS_SECRETKEY", ""),
			Timeout:   parseDuration(getEnv("BPJS_TIMEOUT", "30s")),
		},
		SatuSehat: SatuSehatConfig{
			OrgID:        getEnv("BRIDGING_SATUSEHAT_ORG_ID", ""),
			FasyakesID:   getEnv("BRIDGING_SATUSEHAT_FASYAKES_ID", ""),
			ClientID:     getEnv("BRIDGING_SATUSEHAT_CLIENT_ID", ""),
			ClientSecret: getEnv("BRIDGING_SATUSEHAT_CLIENT_SECRET", ""),
			AuthURL:      getEnv("BRIDGING_SATUSEHAT_AUTH_URL", "https://api-satusehat.kemkes.go.id/oauth2/v1"),
			BaseURL:      getEnv("BRIDGING_SATUSEHAT_BASE_URL", "https://api-satusehat.kemkes.go.id/fhir-r4/v1"),
			ConsentURL:   getEnv("BRIDGING_SATUSEHAT_CONSENT_URL", "https://api-satusehat.dto.kemkes.go.id/consent/v1"),
			KFAURL:       getEnv("BRIDGING_SATUSEHAT_KFA_URL", "https://api-satusehat.kemkes.go.id/kfa-v2"),
			Timeout:      parseDuration(getEnv("BRIDGING_SATUSEHAT_TIMEOUT", "30s")),
		},
		Swagger: SwaggerConfig{
			Title: getEnv("SWAGGER_TITLE", "SERVICE API"),
			// Typo fixed in the default value ("CUSTUM" -> "CUSTOM");
			// an explicit SWAGGER_DESCRIPTION still overrides it.
			Description:    getEnv("SWAGGER_DESCRIPTION", "CUSTOM SERVICE API"),
			Version:        getEnv("SWAGGER_VERSION", "1.0.0"),
			TermsOfService: getEnv("SWAGGER_TERMS_OF_SERVICE", "http://swagger.io/terms/"),
			ContactName:    getEnv("SWAGGER_CONTACT_NAME", "API Support"),
			ContactURL:     getEnv("SWAGGER_CONTACT_URL", "http://rssa.example.com/support"),
			ContactEmail:   getEnv("SWAGGER_CONTACT_EMAIL", "support@swagger.io"),
			LicenseName:    getEnv("SWAGGER_LICENSE_NAME", "Apache 2.0"),
			LicenseURL:     getEnv("SWAGGER_LICENSE_URL", "http://www.apache.org/licenses/LICENSE-2.0.html"),
			Host:           getEnv("SWAGGER_HOST", "localhost:8080"),
			BasePath:       getEnv("SWAGGER_BASE_PATH", "/api/v1"),
			Schemes:        parseSchemes(getEnv("SWAGGER_SCHEMES", "http,https")),
		},
	}
+ // Initialize validator + config.Validator = validator.New() + + // Load database configurations + config.loadDatabaseConfigs() + + // Load read replica configurations + config.loadReadReplicaConfigs() + + return config +} + +func (c *Config) loadDatabaseConfigs() { + // Simplified approach: Directly load from environment variables + // This ensures we get the exact values specified in .env + + // Primary database configuration + c.Databases["default"] = DatabaseConfig{ + Name: "default", + Type: getEnv("DB_CONNECTION", "postgres"), + Host: getEnv("DB_HOST", "localhost"), + Port: getEnvAsInt("DB_PORT", 5432), + Username: getEnv("DB_USERNAME", ""), + Password: getEnv("DB_PASSWORD", ""), + Database: getEnv("DB_DATABASE", "satu_db"), + Schema: getEnv("DB_SCHEMA", "public"), + SSLMode: getEnv("DB_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("DB_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("DB_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("DB_CONN_MAX_LIFETIME", "5m")), + } + + // SATUDATA database configuration + c.addPostgreSQLConfigs() + + // MongoDB database configuration + c.addMongoDBConfigs() + + // Legacy support for backward compatibility + envVars := os.Environ() + dbConfigs := make(map[string]map[string]string) + + // Parse database configurations from environment variables + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + value := parts[1] + + // Parse specific database configurations + if strings.HasSuffix(key, "_CONNECTION") || strings.HasSuffix(key, "_HOST") || + strings.HasSuffix(key, "_DATABASE") || strings.HasSuffix(key, "_USERNAME") || + strings.HasSuffix(key, "_PASSWORD") || strings.HasSuffix(key, "_PORT") || + strings.HasSuffix(key, "_NAME") { + + segments := strings.Split(key, "_") + if len(segments) >= 2 { + dbName := strings.ToLower(strings.Join(segments[:len(segments)-1], "_")) + property := 
strings.ToLower(segments[len(segments)-1]) + + if dbConfigs[dbName] == nil { + dbConfigs[dbName] = make(map[string]string) + } + dbConfigs[dbName][property] = value + } + } + } + + // Create DatabaseConfig from parsed configurations for additional databases + for name, config := range dbConfigs { + // Skip empty configurations or system configurations + if name == "" || strings.Contains(name, "chrome_crashpad_pipe") || name == "primary" { + continue + } + + dbConfig := DatabaseConfig{ + Name: name, + Type: getEnvFromMap(config, "connection", getEnvFromMap(config, "type", "postgres")), + Host: getEnvFromMap(config, "host", "localhost"), + Port: getEnvAsIntFromMap(config, "port", 5432), + Username: getEnvFromMap(config, "username", ""), + Password: getEnvFromMap(config, "password", ""), + Database: getEnvFromMap(config, "database", getEnvFromMap(config, "name", name)), + Schema: getEnvFromMap(config, "schema", "public"), + SSLMode: getEnvFromMap(config, "sslmode", "disable"), + Path: getEnvFromMap(config, "path", ""), + Options: getEnvFromMap(config, "options", ""), + MaxOpenConns: getEnvAsIntFromMap(config, "max_open_conns", 25), + MaxIdleConns: getEnvAsIntFromMap(config, "max_idle_conns", 25), + ConnMaxLifetime: parseDuration(getEnvFromMap(config, "conn_max_lifetime", "5m")), + } + + // Skip if username is empty and it's not a system config + if dbConfig.Username == "" && !strings.HasPrefix(name, "chrome") { + continue + } + + c.Databases[name] = dbConfig + } +} + +func (c *Config) loadReadReplicaConfigs() { + envVars := os.Environ() + + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + value := parts[1] + + // Parse read replica configurations (format: [DBNAME]_REPLICA_[INDEX]_[PROPERTY]) + if strings.Contains(key, "_REPLICA_") { + segments := strings.Split(key, "_") + if len(segments) >= 5 && strings.ToUpper(segments[2]) == "REPLICA" { + dbName := strings.ToLower(segments[1]) + 
replicaIndex := segments[3] + property := strings.ToLower(strings.Join(segments[4:], "_")) + + replicaKey := dbName + "_replica_" + replicaIndex + + if c.ReadReplicas[dbName] == nil { + c.ReadReplicas[dbName] = []DatabaseConfig{} + } + + // Find or create replica config + var replicaConfig *DatabaseConfig + for i := range c.ReadReplicas[dbName] { + if c.ReadReplicas[dbName][i].Name == replicaKey { + replicaConfig = &c.ReadReplicas[dbName][i] + break + } + } + + if replicaConfig == nil { + // Create new replica config + newConfig := DatabaseConfig{ + Name: replicaKey, + Type: c.Databases[dbName].Type, + Host: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_HOST", c.Databases[dbName].Host), + Port: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PORT", c.Databases[dbName].Port), + Username: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_USERNAME", c.Databases[dbName].Username), + Password: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PASSWORD", c.Databases[dbName].Password), + Database: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_DATABASE", c.Databases[dbName].Database), + Schema: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SCHEMA", c.Databases[dbName].Schema), + SSLMode: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSLMODE", c.Databases[dbName].SSLMode), + MaxOpenConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_OPEN_CONNS", c.Databases[dbName].MaxOpenConns), + MaxIdleConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_IDLE_CONNS", c.Databases[dbName].MaxIdleConns), + ConnMaxLifetime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_CONN_MAX_LIFETIME", "5m")), + } + c.ReadReplicas[dbName] = append(c.ReadReplicas[dbName], newConfig) + replicaConfig = &c.ReadReplicas[dbName][len(c.ReadReplicas[dbName])-1] + } + + // Update the specific replica + switch 
property { + case "host": + replicaConfig.Host = value + case "port": + replicaConfig.Port = getEnvAsInt(key, 5432) + case "username": + replicaConfig.Username = value + case "password": + replicaConfig.Password = value + case "database": + replicaConfig.Database = value + case "schema": + replicaConfig.Schema = value + case "sslmode": + replicaConfig.SSLMode = value + case "max_open_conns": + replicaConfig.MaxOpenConns = getEnvAsInt(key, 25) + case "max_idle_conns": + replicaConfig.MaxIdleConns = getEnvAsInt(key, 25) + case "conn_max_lifetime": + replicaConfig.ConnMaxLifetime = parseDuration(value) + } + } + } + } +} + +func (c *Config) addSpecificDatabase(prefix, defaultType string) { + connection := getEnv(strings.ToUpper(prefix)+"_CONNECTION", defaultType) + host := getEnv(strings.ToUpper(prefix)+"_HOST", "") + if host != "" { + dbConfig := DatabaseConfig{ + Name: prefix, + Type: connection, + Host: host, + Port: getEnvAsInt(strings.ToUpper(prefix)+"_PORT", 5432), + Username: getEnv(strings.ToUpper(prefix)+"_USERNAME", ""), + Password: getEnv(strings.ToUpper(prefix)+"_PASSWORD", ""), + Database: getEnv(strings.ToUpper(prefix)+"_DATABASE", getEnv(strings.ToUpper(prefix)+"_NAME", prefix)), + Schema: getEnv(strings.ToUpper(prefix)+"_SCHEMA", "public"), + SSLMode: getEnv(strings.ToUpper(prefix)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv(strings.ToUpper(prefix)+"_CONN_MAX_LIFETIME", "5m")), + } + c.Databases[prefix] = dbConfig + } +} + +// PostgreSQL database +func (c *Config) addPostgreSQLConfigs() { + // SATUDATA database configuration + // defaultPOSTGRESHost := getEnv("POSTGRES_HOST", "localhost") + // if defaultPOSTGRESHost != "" { + // c.Databases["postgres"] = DatabaseConfig{ + // Name: "postgres", + // Type: getEnv("POSTGRES_CONNECTION", "postgres"), + // Host: defaultPOSTGRESHost, + // 
Port: getEnvAsInt("POSTGRES_PORT", 5432), + // Username: getEnv("POSTGRES_USERNAME", ""), + // Password: getEnv("POSTGRES_PASSWORD", ""), + // Database: getEnv("POSTGRES_DATABASE", "postgres"), + // Schema: getEnv("POSTGRES_SCHEMA", "public"), + // SSLMode: getEnv("POSTGRES_SSLMODE", "disable"), + // MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25), + // MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25), + // ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")), + // } + // } + + // Support for custom PostgreSQL configurations with POSTGRES_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse PostgreSQL configurations (format: POSTGRES_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "POSTGRES_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + + // Skip if it's a standard PostgreSQL configuration + if dbName == "connection" || dbName == "dev" || dbName == "default" || dbName == "satudata" { + continue + } + + // Create or update PostgreSQL configuration + if _, exists := c.Databases[dbName]; !exists { + c.Databases[dbName] = DatabaseConfig{ + Name: dbName, + Type: "postgres", + Host: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_HOST", "localhost"), + Port: getEnvAsInt("POSTGRES_"+strings.ToUpper(dbName)+"_PORT", 5432), + Username: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_USERNAME", ""), + Password: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_PASSWORD", ""), + Database: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_DATABASE", dbName), + Schema: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SCHEMA", "public"), + SSLMode: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25), + MaxIdleConns: 
getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")), + } + } + } + } + } +} + +// addMYSQLConfigs adds MYSQL database +func (c *Config) addMySQLConfigs() { + // Primary MySQL configuration + defaultMySQLHost := getEnv("MYSQL_HOST", "") + if defaultMySQLHost != "" { + c.Databases["mysql"] = DatabaseConfig{ + Name: "mysql", + Type: getEnv("MYSQL_CONNECTION", "mysql"), + Host: defaultMySQLHost, + Port: getEnvAsInt("MYSQL_PORT", 3306), + Username: getEnv("MYSQL_USERNAME", ""), + Password: getEnv("MYSQL_PASSWORD", ""), + Database: getEnv("MYSQL_DATABASE", "mysql"), + SSLMode: getEnv("MYSQL_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), + } + } + + // Support for custom MySQL configurations with MYSQL_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse MySQL configurations (format: MYSQL_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "MYSQL_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + + // Skip if it's a standard MySQL configuration + if dbName == "connection" || dbName == "dev" || dbName == "max" || dbName == "conn" { + continue + } + + // Create or update MySQL configuration + if _, exists := c.Databases[dbName]; !exists { + mysqlHost := getEnv("MYSQL_"+strings.ToUpper(dbName)+"_HOST", "") + if mysqlHost != "" { + c.Databases[dbName] = DatabaseConfig{ + Name: dbName, + Type: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_CONNECTION", "mysql"), + Host: mysqlHost, + Port: getEnvAsInt("MYSQL_"+strings.ToUpper(dbName)+"_PORT", 3306), + Username: 
getEnv("MYSQL_"+strings.ToUpper(dbName)+"_USERNAME", ""), + Password: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_PASSWORD", ""), + Database: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_DATABASE", dbName), + SSLMode: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), + } + } + } + } + } + } +} + +// addMongoDBConfigs adds MongoDB database configurations from environment variables +func (c *Config) addMongoDBConfigs() { + // Primary MongoDB configuration + mongoHost := getEnv("MONGODB_HOST", "") + if mongoHost != "" { + c.Databases["mongodb"] = DatabaseConfig{ + Name: "mongodb", + Type: getEnv("MONGODB_CONNECTION", "mongodb"), + Host: mongoHost, + Port: getEnvAsInt("MONGODB_PORT", 27017), + Username: getEnv("MONGODB_USER", ""), + Password: getEnv("MONGODB_PASS", ""), + Database: getEnv("MONGODB_MASTER", "master"), + SSLMode: getEnv("MONGODB_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), + MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), + ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), + } + } + + // Additional MongoDB configurations for local database + mongoLocalHost := getEnv("MONGODB_LOCAL_HOST", "") + if mongoLocalHost != "" { + c.Databases["mongodb_local"] = DatabaseConfig{ + Name: "mongodb_local", + Type: getEnv("MONGODB_CONNECTION", "mongodb"), + Host: mongoLocalHost, + Port: getEnvAsInt("MONGODB_LOCAL_PORT", 27017), + Username: getEnv("MONGODB_LOCAL_USER", ""), + Password: getEnv("MONGODB_LOCAL_PASS", ""), + Database: getEnv("MONGODB_LOCAL_DB", "local"), + SSLMode: getEnv("MONGOD_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), + MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), + ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", 
"30m")), + } + } + + // Support for custom MongoDB configurations with MONGODB_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse MongoDB configurations (format: MONGODB_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "MONGODB_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + // Skip if it's a standard MongoDB configuration + if dbName == "connection" || dbName == "dev" || dbName == "local" { + continue + } + + // Create or update MongoDB configuration + if _, exists := c.Databases[dbName]; !exists { + c.Databases[dbName] = DatabaseConfig{ + Name: dbName, + Type: "mongodb", + Host: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_HOST", "localhost"), + Port: getEnvAsInt("MONGODB_"+strings.ToUpper(dbName)+"_PORT", 27017), + Username: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_USER", ""), + Password: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_PASS", ""), + Database: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_DB", dbName), + SSLMode: getEnv("MONGOD_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), + MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), + ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), + } + } + } + } + } +} + +func getEnvFromMap(config map[string]string, key, defaultValue string) string { + if value, exists := config[key]; exists { + return value + } + return defaultValue +} + +func getEnvAsIntFromMap(config map[string]string, key string, defaultValue int) int { + if value, exists := config[key]; exists { + if intValue, err := strconv.Atoi(value); err == nil { + return intValue + } + } + return defaultValue +} + +func parseDuration(durationStr string) time.Duration { + if duration, err := time.ParseDuration(durationStr); err == nil { + 
return duration + } + return 5 * time.Minute +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +func getEnvAsInt(key string, defaultValue int) int { + valueStr := getEnv(key, "") + if value, err := strconv.Atoi(valueStr); err == nil { + return value + } + return defaultValue +} + +func getEnvAsBool(key string, defaultValue bool) bool { + valueStr := getEnv(key, "") + if value, err := strconv.ParseBool(valueStr); err == nil { + return value + } + return defaultValue +} + +// parseSchemes parses comma-separated schemes string into a slice +func parseSchemes(schemesStr string) []string { + if schemesStr == "" { + return []string{"http"} + } + + schemes := strings.Split(schemesStr, ",") + for i, scheme := range schemes { + schemes[i] = strings.TrimSpace(scheme) + } + return schemes +} + +func (c *Config) Validate() error { + if len(c.Databases) == 0 { + log.Fatal("At least one database configuration is required") + } + + for name, db := range c.Databases { + if db.Host == "" { + log.Fatalf("Database host is required for %s", name) + } + if db.Username == "" { + log.Fatalf("Database username is required for %s", name) + } + if db.Password == "" { + log.Fatalf("Database password is required for %s", name) + } + if db.Database == "" { + log.Fatalf("Database name is required for %s", name) + } + } + + if c.Bpjs.BaseURL == "" { + log.Fatal("BPJS Base URL is required") + } + if c.Bpjs.ConsID == "" { + log.Fatal("BPJS Consumer ID is required") + } + if c.Bpjs.UserKey == "" { + log.Fatal("BPJS User Key is required") + } + if c.Bpjs.SecretKey == "" { + log.Fatal("BPJS Secret Key is required") + } + + // Validate Keycloak configuration if enabled + if c.Keycloak.Enabled { + if c.Keycloak.Issuer == "" { + log.Fatal("Keycloak issuer is required when Keycloak is enabled") + } + if c.Keycloak.Audience == "" { + log.Fatal("Keycloak audience is required when Keycloak is enabled") + } + if 
c.Keycloak.JwksURL == "" { + log.Fatal("Keycloak JWKS URL is required when Keycloak is enabled") + } + } + + // Validate SatuSehat configuration + if c.SatuSehat.OrgID == "" { + log.Fatal("SatuSehat Organization ID is required") + } + if c.SatuSehat.FasyakesID == "" { + log.Fatal("SatuSehat Fasyankes ID is required") + } + if c.SatuSehat.ClientID == "" { + log.Fatal("SatuSehat Client ID is required") + } + if c.SatuSehat.ClientSecret == "" { + log.Fatal("SatuSehat Client Secret is required") + } + if c.SatuSehat.AuthURL == "" { + log.Fatal("SatuSehat Auth URL is required") + } + if c.SatuSehat.BaseURL == "" { + log.Fatal("SatuSehat Base URL is required") + } + + return nil +} diff --git a/internal/database/database.go b/internal/database/database.go new file mode 100644 index 0000000..b7f5b4f --- /dev/null +++ b/internal/database/database.go @@ -0,0 +1,699 @@ +package database + +import ( + "context" + "database/sql" + "fmt" + "log" // Import runtime package + + // Import debug package + "strconv" + "sync" + "time" + + "api-service/internal/config" + + _ "github.com/jackc/pgx/v5" // Import pgx driver + "github.com/lib/pq" + _ "gorm.io/driver/postgres" // Import GORM PostgreSQL driver + + _ "github.com/go-sql-driver/mysql" // MySQL driver for database/sql + _ "gorm.io/driver/mysql" // GORM MySQL driver + _ "gorm.io/driver/sqlserver" // GORM SQL Server driver + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +// DatabaseType represents supported database types +type DatabaseType string + +const ( + Postgres DatabaseType = "postgres" + MySQL DatabaseType = "mysql" + SQLServer DatabaseType = "sqlserver" + SQLite DatabaseType = "sqlite" + MongoDB DatabaseType = "mongodb" +) + +// Service represents a service that interacts with multiple databases +type Service interface { + Health() map[string]map[string]string + GetDB(name string) (*sql.DB, error) + GetMongoClient(name string) (*mongo.Client, error) + GetReadDB(name string) 
(*sql.DB, error) // For read replicas + Close() error + ListDBs() []string + GetDBType(name string) (DatabaseType, error) + // Tambahkan method untuk WebSocket notifications + ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error + NotifyChange(dbName, channel, payload string) error + GetPrimaryDB(name string) (*sql.DB, error) // Helper untuk get primary DB +} + +type service struct { + sqlDatabases map[string]*sql.DB + mongoClients map[string]*mongo.Client + readReplicas map[string][]*sql.DB // Read replicas for load balancing + configs map[string]config.DatabaseConfig + readConfigs map[string][]config.DatabaseConfig + mu sync.RWMutex + readBalancer map[string]int // Round-robin counter for read replicas + listeners map[string]*pq.Listener // Tambahkan untuk tracking listeners + listenersMu sync.RWMutex +} + +var ( + dbManager *service + once sync.Once +) + +// New creates a new database service with multiple connections +func New(cfg *config.Config) Service { + once.Do(func() { + dbManager = &service{ + sqlDatabases: make(map[string]*sql.DB), + mongoClients: make(map[string]*mongo.Client), + readReplicas: make(map[string][]*sql.DB), + configs: make(map[string]config.DatabaseConfig), + readConfigs: make(map[string][]config.DatabaseConfig), + readBalancer: make(map[string]int), + listeners: make(map[string]*pq.Listener), + } + + log.Println("Initializing database service...") // Log when the initialization starts + // log.Printf("Current Goroutine ID: %d", runtime.NumGoroutine()) // Log the number of goroutines + // log.Printf("Stack Trace: %s", debug.Stack()) // Log the stack trace + dbManager.loadFromConfig(cfg) + + // Initialize all databases + for name, dbConfig := range dbManager.configs { + if err := dbManager.addDatabase(name, dbConfig); err != nil { + log.Printf("Failed to connect to database %s: %v", name, err) + } + } + + // Initialize read replicas + for name, replicaConfigs := range 
dbManager.readConfigs { + for i, replicaConfig := range replicaConfigs { + if err := dbManager.addReadReplica(name, i, replicaConfig); err != nil { + log.Printf("Failed to connect to read replica %s[%d]: %v", name, i, err) + } + } + } + }) + + return dbManager +} + +func (s *service) loadFromConfig(cfg *config.Config) { + s.mu.Lock() + defer s.mu.Unlock() + + // Load primary databases + for name, dbConfig := range cfg.Databases { + s.configs[name] = dbConfig + } + + // Load read replicas + for name, replicaConfigs := range cfg.ReadReplicas { + s.readConfigs[name] = replicaConfigs + } +} + +func (s *service) addDatabase(name string, config config.DatabaseConfig) error { + s.mu.Lock() + defer s.mu.Unlock() + + log.Printf("=== Database Connection Debug ===") + // log.Printf("Database: %s", name) + // log.Printf("Type: %s", config.Type) + // log.Printf("Host: %s", config.Host) + // log.Printf("Port: %d", config.Port) + // log.Printf("Database: %s", config.Database) + // log.Printf("Username: %s", config.Username) + // log.Printf("SSLMode: %s", config.SSLMode) + + var db *sql.DB + var err error + + dbType := DatabaseType(config.Type) + + switch dbType { + case Postgres: + db, err = s.openPostgresConnection(config) + case MySQL: + db, err = s.openMySQLConnection(config) + case SQLServer: + db, err = s.openSQLServerConnection(config) + case SQLite: + db, err = s.openSQLiteConnection(config) + case MongoDB: + return s.addMongoDB(name, config) + default: + return fmt.Errorf("unsupported database type: %s", config.Type) + } + + if err != nil { + log.Printf("โŒ Error connecting to database %s: %v", name, err) + log.Printf(" Database: %s@%s:%d/%s", config.Username, config.Host, config.Port, config.Database) + return err + } + + log.Printf("โœ… Successfully connected to database: %s", name) + return s.configureSQLDB(name, db, config.MaxOpenConns, config.MaxIdleConns, config.ConnMaxLifetime) +} + +func (s *service) addReadReplica(name string, index int, config 
config.DatabaseConfig) error { + s.mu.Lock() + defer s.mu.Unlock() + + var db *sql.DB + var err error + + dbType := DatabaseType(config.Type) + + switch dbType { + case Postgres: + db, err = s.openPostgresConnection(config) + case MySQL: + db, err = s.openMySQLConnection(config) + case SQLServer: + db, err = s.openSQLServerConnection(config) + case SQLite: + db, err = s.openSQLiteConnection(config) + default: + return fmt.Errorf("unsupported database type for read replica: %s", config.Type) + } + + if err != nil { + return err + } + + if s.readReplicas[name] == nil { + s.readReplicas[name] = make([]*sql.DB, 0) + } + + // Ensure we have enough slots + for len(s.readReplicas[name]) <= index { + s.readReplicas[name] = append(s.readReplicas[name], nil) + } + + s.readReplicas[name][index] = db + log.Printf("Successfully connected to read replica %s[%d]", name, index) + + return nil +} + +func (s *service) openPostgresConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + config.SSLMode, + ) + + if config.Schema != "" { + connStr += "&search_path=" + config.Schema + } + + db, err := sql.Open("pgx", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open PostgreSQL connection: %w", err) + } + + return db, nil +} + +func (s *service) openMySQLConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?parseTime=true", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + ) + + db, err := sql.Open("mysql", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open MySQL connection: %w", err) + } + + return db, nil +} + +func (s *service) openSQLServerConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("sqlserver://%s:%s@%s:%d?database=%s", + config.Username, + config.Password, + 
config.Host, + config.Port, + config.Database, + ) + + db, err := sql.Open("sqlserver", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open SQL Server connection: %w", err) + } + + return db, nil +} + +func (s *service) openSQLiteConnection(config config.DatabaseConfig) (*sql.DB, error) { + dbPath := config.Path + if dbPath == "" { + dbPath = fmt.Sprintf("./data/%s.db", config.Database) + } + + db, err := sql.Open("sqlite3", dbPath) + if err != nil { + return nil, fmt.Errorf("failed to open SQLite connection: %w", err) + } + + return db, nil +} + +func (s *service) addMongoDB(name string, config config.DatabaseConfig) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + uri := fmt.Sprintf("mongodb://%s:%s@%s:%d/%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + ) + + client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri)) + if err != nil { + return fmt.Errorf("failed to connect to MongoDB: %w", err) + } + + s.mongoClients[name] = client + log.Printf("Successfully connected to MongoDB: %s", name) + + return nil +} + +func (s *service) configureSQLDB(name string, db *sql.DB, maxOpenConns, maxIdleConns int, connMaxLifetime time.Duration) error { + db.SetMaxOpenConns(maxOpenConns) + db.SetMaxIdleConns(maxIdleConns) + db.SetConnMaxLifetime(connMaxLifetime) + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + if err := db.PingContext(ctx); err != nil { + db.Close() + return fmt.Errorf("failed to ping database: %w", err) + } + + s.sqlDatabases[name] = db + log.Printf("Successfully connected to SQL database: %s", name) + + return nil +} + +// Health checks the health of all database connections by pinging each database. 
+func (s *service) Health() map[string]map[string]string { + s.mu.RLock() + defer s.mu.RUnlock() + + result := make(map[string]map[string]string) + + // Check SQL databases + for name, db := range s.sqlDatabases { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + stats := make(map[string]string) + + err := db.PingContext(ctx) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("db down: %v", err) + stats["type"] = "sql" + stats["role"] = "primary" + result[name] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "It's healthy" + stats["type"] = "sql" + stats["role"] = "primary" + + dbStats := db.Stats() + stats["open_connections"] = strconv.Itoa(dbStats.OpenConnections) + stats["in_use"] = strconv.Itoa(dbStats.InUse) + stats["idle"] = strconv.Itoa(dbStats.Idle) + stats["wait_count"] = strconv.FormatInt(dbStats.WaitCount, 10) + stats["wait_duration"] = dbStats.WaitDuration.String() + stats["max_idle_closed"] = strconv.FormatInt(dbStats.MaxIdleClosed, 10) + stats["max_lifetime_closed"] = strconv.FormatInt(dbStats.MaxLifetimeClosed, 10) + + if dbStats.OpenConnections > 40 { + stats["message"] = "The database is experiencing heavy load." + } + + if dbStats.WaitCount > 1000 { + stats["message"] = "The database has a high number of wait events, indicating potential bottlenecks." + } + + if dbStats.MaxIdleClosed > int64(dbStats.OpenConnections)/2 { + stats["message"] = "Many idle connections are being closed, consider revising the connection pool settings." + } + + if dbStats.MaxLifetimeClosed > int64(dbStats.OpenConnections)/2 { + stats["message"] = "Many connections are being closed due to max lifetime, consider increasing max lifetime or revising the connection usage pattern." 
+ } + + result[name] = stats + } + + // Check read replicas + for name, replicas := range s.readReplicas { + for i, db := range replicas { + if db == nil { + continue + } + + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + replicaName := fmt.Sprintf("%s_replica_%d", name, i) + stats := make(map[string]string) + + err := db.PingContext(ctx) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("read replica down: %v", err) + stats["type"] = "sql" + stats["role"] = "replica" + result[replicaName] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "Read replica healthy" + stats["type"] = "sql" + stats["role"] = "replica" + + dbStats := db.Stats() + stats["open_connections"] = strconv.Itoa(dbStats.OpenConnections) + stats["in_use"] = strconv.Itoa(dbStats.InUse) + stats["idle"] = strconv.Itoa(dbStats.Idle) + + result[replicaName] = stats + } + } + + // Check MongoDB connections + for name, client := range s.mongoClients { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + stats := make(map[string]string) + + err := client.Ping(ctx, nil) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("mongodb down: %v", err) + stats["type"] = "mongodb" + result[name] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "It's healthy" + stats["type"] = "mongodb" + + result[name] = stats + } + + return result +} + +// GetDB returns a specific SQL database connection by name +func (s *service) GetDB(name string) (*sql.DB, error) { + log.Printf("Attempting to get database connection for: %s", name) + s.mu.RLock() + defer s.mu.RUnlock() + + db, exists := s.sqlDatabases[name] + if !exists { + log.Printf("Error: database %s not found", name) // Log the error + return nil, fmt.Errorf("database %s not found", name) + } + + log.Printf("Current connection pool state for %s: Open: %d, In Use: %d, Idle: %d", + name, 
db.Stats().OpenConnections, db.Stats().InUse, db.Stats().Idle)

	// Removed: a second s.mu.RLock()/deferred RUnlock() pair was taken here
	// while the read lock from the top of GetDB was still held, together with
	// a commented-out duplicate of the lookup above. Recursive RLock can
	// deadlock when a writer is queued between the two acquisitions (see the
	// sync.RWMutex documentation).
	return db, nil
}

// GetReadDB returns a read replica connection using round-robin load balancing.
// It falls back to the primary connection when no usable replica exists.
func (s *service) GetReadDB(name string) (*sql.DB, error) {
	// A full write lock is required here (the original used RLock): the
	// round-robin counter map s.readBalancer is mutated below, and a map
	// write under a read lock is a data race.
	s.mu.Lock()
	defer s.mu.Unlock()

	replicas, exists := s.readReplicas[name]
	if !exists || len(replicas) == 0 {
		// Fallback to primary if no replicas available. The primary is looked
		// up directly rather than via GetDB, which would try to re-acquire
		// s.mu while we already hold it and deadlock.
		return s.primaryLocked(name)
	}

	// Round-robin load balancing
	s.readBalancer[name] = (s.readBalancer[name] + 1) % len(replicas)
	selected := replicas[s.readBalancer[name]]

	if selected == nil {
		// Fallback to primary if replica is nil
		return s.primaryLocked(name)
	}

	return selected, nil
}

// primaryLocked returns the primary SQL connection for name.
// The caller must already hold s.mu (read or write).
func (s *service) primaryLocked(name string) (*sql.DB, error) {
	db, exists := s.sqlDatabases[name]
	if !exists {
		return nil, fmt.Errorf("database %s not found", name)
	}
	return db, nil
}

// GetMongoClient returns a specific MongoDB client by name
func (s *service) GetMongoClient(name string) (*mongo.Client, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()

	client, exists := s.mongoClients[name]
	if !exists {
		return nil, fmt.Errorf("MongoDB client %s not found", name)
	}

	return client, nil
}

// ListDBs returns list of available database names
func (s *service) ListDBs() []string {
	s.mu.RLock()
	defer s.mu.RUnlock()

	names := make([]string, 0, len(s.sqlDatabases)+len(s.mongoClients))

	for name := range s.sqlDatabases {
		names = append(names, name)
	}

	for name := range s.mongoClients {
		names = append(names, name)
	}

	return names
}

// GetDBType returns the type of a specific database
func (s *service) GetDBType(name string) (DatabaseType, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()

	config, exists := s.configs[name]
	if !exists {
		return "", fmt.Errorf("database %s not found", name)
	}

	return DatabaseType(config.Type), nil
}

// Close closes all database connections
func (s
*service) Close() error { + s.mu.Lock() + defer s.mu.Unlock() + + var errs []error + + for name, db := range s.sqlDatabases { + if err := db.Close(); err != nil { + errs = append(errs, fmt.Errorf("failed to close database %s: %w", name, err)) + } else { + log.Printf("Disconnected from SQL database: %s", name) + } + } + + for name, replicas := range s.readReplicas { + for i, db := range replicas { + if db != nil { + if err := db.Close(); err != nil { + errs = append(errs, fmt.Errorf("failed to close read replica %s[%d]: %w", name, i, err)) + } else { + log.Printf("Disconnected from read replica: %s[%d]", name, i) + } + } + } + } + + for name, client := range s.mongoClients { + if err := client.Disconnect(context.Background()); err != nil { + errs = append(errs, fmt.Errorf("failed to disconnect MongoDB client %s: %w", name, err)) + } else { + log.Printf("Disconnected from MongoDB: %s", name) + } + } + + s.sqlDatabases = make(map[string]*sql.DB) + s.mongoClients = make(map[string]*mongo.Client) + s.readReplicas = make(map[string][]*sql.DB) + s.configs = make(map[string]config.DatabaseConfig) + s.readConfigs = make(map[string][]config.DatabaseConfig) + + if len(errs) > 0 { + return fmt.Errorf("errors closing databases: %v", errs) + } + + return nil +} + +// GetPrimaryDB returns primary database connection +func (s *service) GetPrimaryDB(name string) (*sql.DB, error) { + return s.GetDB(name) +} + +// ListenForChanges implements PostgreSQL LISTEN/NOTIFY for real-time updates +func (s *service) ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error { + s.mu.RLock() + config, exists := s.configs[dbName] + s.mu.RUnlock() + + if !exists { + return fmt.Errorf("database %s not found", dbName) + } + + // Only support PostgreSQL for LISTEN/NOTIFY + if DatabaseType(config.Type) != Postgres { + return fmt.Errorf("LISTEN/NOTIFY only supported for PostgreSQL databases") + } + + // Create connection string for listener + connStr 
:= fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s",
		config.Username,
		config.Password,
		config.Host,
		config.Port,
		config.Database,
		config.SSLMode,
	)
	// NOTE(review): username/password are interpolated unescaped; credentials
	// containing URL metacharacters would yield an invalid DSN — confirm the
	// inputs or escape them with net/url.

	// Create listener
	listener := pq.NewListener(
		connStr,
		10*time.Second,
		time.Minute,
		func(ev pq.ListenerEventType, err error) {
			if err != nil {
				log.Printf("Database listener (%s) error: %v", dbName, err)
			}
		},
	)

	// Store listener for cleanup
	s.listenersMu.Lock()
	s.listeners[dbName] = listener
	s.listenersMu.Unlock()

	// Listen to specified channels
	for _, channel := range channels {
		if err := listener.Listen(channel); err != nil {
			listener.Close()
			return fmt.Errorf("failed to listen to channel %s: %w", channel, err)
		}
		log.Printf("Listening to database channel: %s on %s", channel, dbName)
	}

	// Start listening loop
	go func() {
		defer func() {
			listener.Close()
			s.listenersMu.Lock()
			delete(s.listeners, dbName)
			s.listenersMu.Unlock()
			log.Printf("Database listener for %s stopped", dbName)
		}()

		// One reusable timer replaces the original per-iteration time.After,
		// which allocated a fresh timer on every select round of this
		// long-lived loop.
		keepAlive := time.NewTimer(90 * time.Second)
		defer keepAlive.Stop()

		for {
			select {
			case n := <-listener.Notify:
				if n != nil {
					callback(n.Channel, n.Extra)
				}
			case <-ctx.Done():
				return
			case <-keepAlive.C:
				// Send ping to keep connection alive
				go func() {
					if err := listener.Ping(); err != nil {
						log.Printf("Listener ping failed for %s: %v", dbName, err)
					}
				}()
			}

			// Re-arm the keep-alive window after every event, draining the
			// channel when the timer already fired, so the next wait behaves
			// exactly like the original time.After(90s).
			if !keepAlive.Stop() {
				select {
				case <-keepAlive.C:
				default:
				}
			}
			keepAlive.Reset(90 * time.Second)
		}
	}()

	return nil
}

// NotifyChange sends a notification to a PostgreSQL channel
func (s *service) NotifyChange(dbName, channel, payload string) error {
	db, err := s.GetDB(dbName)
	if err != nil {
		return fmt.Errorf("failed to get database %s: %w", dbName, err)
	}

	// Check if it's PostgreSQL
	s.mu.RLock()
	config, exists := s.configs[dbName]
	s.mu.RUnlock()

	if !exists {
		return fmt.Errorf("database %s configuration not found", dbName)
	}

	if DatabaseType(config.Type) != Postgres {
		return fmt.Errorf("NOTIFY only supported for PostgreSQL databases")
	}

	// Execute NOTIFY
	query :=
"SELECT pg_notify($1, $2)" + _, err = db.Exec(query, channel, payload) + if err != nil { + return fmt.Errorf("failed to send notification: %w", err) + } + + log.Printf("Sent notification to channel %s on %s: %s", channel, dbName, payload) + return nil +} diff --git a/internal/handlers/auth/auth.go b/internal/handlers/auth/auth.go new file mode 100644 index 0000000..3bd74dd --- /dev/null +++ b/internal/handlers/auth/auth.go @@ -0,0 +1,132 @@ +package handlers + +import ( + models "api-service/internal/models/auth" + services "api-service/internal/services/auth" + "net/http" + + "github.com/gin-gonic/gin" +) + +// AuthHandler handles authentication endpoints +type AuthHandler struct { + authService *services.AuthService +} + +// NewAuthHandler creates a new authentication handler +func NewAuthHandler(authService *services.AuthService) *AuthHandler { + return &AuthHandler{ + authService: authService, + } +} + +// Login godoc +// @Summary Login user and get JWT token +// @Description Authenticate user with username and password to receive JWT token +// @Tags Authentication +// @Accept json +// @Produce json +// @Param login body models.LoginRequest true "Login credentials" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/auth/login [post] +func (h *AuthHandler) Login(c *gin.Context) { + var loginReq models.LoginRequest + + // Bind JSON request + if err := c.ShouldBindJSON(&loginReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Authenticate user + tokenResponse, err := h.authService.Login(loginReq.Username, loginReq.Password) + if err != nil { + c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} + +// RefreshToken godoc +// @Summary Refresh JWT token +// @Description Refresh the JWT token using a valid refresh token +// 
@Tags Authentication +// @Accept json +// @Produce json +// @Param refresh body map[string]string true "Refresh token" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/auth/refresh [post] +func (h *AuthHandler) RefreshToken(c *gin.Context) { + // For now, this is a placeholder for refresh token functionality + // In a real implementation, you would handle refresh tokens here + c.JSON(http.StatusNotImplemented, gin.H{"error": "refresh token not implemented"}) +} + +// Register godoc +// @Summary Register new user +// @Description Register a new user account +// @Tags Authentication +// @Accept json +// @Produce json +// @Param register body map[string]string true "Registration data" +// @Success 201 {object} map[string]string +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/auth/register [post] +func (h *AuthHandler) Register(c *gin.Context) { + var registerReq struct { + Username string `json:"username" binding:"required"` + Email string `json:"email" binding:"required,email"` + Password string `json:"password" binding:"required,min=6"` + Role string `json:"role" binding:"required"` + } + + if err := c.ShouldBindJSON(®isterReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + err := h.authService.RegisterUser( + registerReq.Username, + registerReq.Email, + registerReq.Password, + registerReq.Role, + ) + + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusCreated, gin.H{"message": "user registered successfully"}) +} + +// Me godoc +// @Summary Get current user info +// @Description Get information about the currently authenticated user +// @Tags Authentication +// @Produce json +// @Security Bearer +// @Success 200 {object} models.User +// @Failure 401 {object} map[string]string "Unauthorized" +// 
@Router /api/v1/auth/me [get] +func (h *AuthHandler) Me(c *gin.Context) { + // Get user info from context (set by middleware) + userID, exists := c.Get("user_id") + if !exists { + c.JSON(http.StatusUnauthorized, gin.H{"error": "user not authenticated"}) + return + } + + // In a real implementation, you would fetch user details from database + c.JSON(http.StatusOK, gin.H{ + "id": userID, + "username": c.GetString("username"), + "email": c.GetString("email"), + "role": c.GetString("role"), + }) +} diff --git a/internal/handlers/auth/token.go b/internal/handlers/auth/token.go new file mode 100644 index 0000000..02383c7 --- /dev/null +++ b/internal/handlers/auth/token.go @@ -0,0 +1,95 @@ +package handlers + +import ( + models "api-service/internal/models/auth" + services "api-service/internal/services/auth" + "net/http" + + "github.com/gin-gonic/gin" +) + +// TokenHandler handles token generation endpoints +type TokenHandler struct { + authService *services.AuthService +} + +// NewTokenHandler creates a new token handler +func NewTokenHandler(authService *services.AuthService) *TokenHandler { + return &TokenHandler{ + authService: authService, + } +} + +// GenerateToken godoc +// @Summary Generate JWT token +// @Description Generate a JWT token for a user +// @Tags Token +// @Accept json +// @Produce json +// @Param token body models.LoginRequest true "User credentials" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/token/generate [post] +func (h *TokenHandler) GenerateToken(c *gin.Context) { + var loginReq models.LoginRequest + + // Bind JSON request + if err := c.ShouldBindJSON(&loginReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Generate token + tokenResponse, err := h.authService.Login(loginReq.Username, loginReq.Password) + if err != nil { + c.JSON(http.StatusUnauthorized, 
gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} + +// GenerateTokenDirect godoc +// @Summary Generate token directly +// @Description Generate a JWT token directly without password verification (for testing) +// @Tags Token +// @Accept json +// @Produce json +// @Param user body map[string]string true "User info" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/token/generate-direct [post] +func (h *TokenHandler) GenerateTokenDirect(c *gin.Context) { + var req struct { + Username string `json:"username" binding:"required"` + Email string `json:"email" binding:"required"` + Role string `json:"role" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Create a temporary user for token generation + user := &models.User{ + ID: "temp-" + req.Username, + Username: req.Username, + Email: req.Email, + Role: req.Role, + } + + // Generate token directly + token, err := h.authService.GenerateTokenForUser(user) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, models.TokenResponse{ + AccessToken: token, + TokenType: "Bearer", + ExpiresIn: 3600, + }) +} diff --git a/internal/handlers/healthcheck/healthcheck.go b/internal/handlers/healthcheck/healthcheck.go new file mode 100644 index 0000000..d109bff --- /dev/null +++ b/internal/handlers/healthcheck/healthcheck.go @@ -0,0 +1,24 @@ +package healthcheck + +import ( + "api-service/internal/database" + "net/http" + + "github.com/gin-gonic/gin" +) + +// HealthCheckHandler handles health check requests +type HealthCheckHandler struct { + dbService database.Service +} + +// NewHealthCheckHandler creates a new HealthCheckHandler +func NewHealthCheckHandler(dbService database.Service) *HealthCheckHandler { + return &HealthCheckHandler{dbService: 
dbService} +} + +// CheckHealth checks the health of the application +func (h *HealthCheckHandler) CheckHealth(c *gin.Context) { + healthStatus := h.dbService.Health() // Call the health check function from the database service + c.JSON(http.StatusOK, healthStatus) +} diff --git a/internal/handlers/retribusi/retribusi.go b/internal/handlers/retribusi/retribusi.go new file mode 100644 index 0000000..b5e9a94 --- /dev/null +++ b/internal/handlers/retribusi/retribusi.go @@ -0,0 +1,1401 @@ +package handlers + +import ( + "api-service/internal/config" + "api-service/internal/database" + models "api-service/internal/models" + "api-service/internal/models/retribusi" + utils "api-service/internal/utils/filters" + "api-service/internal/utils/validation" + "api-service/pkg/logger" + "context" + "database/sql" + "fmt" + "net/http" + "strconv" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + "github.com/google/uuid" +) + +var ( + db database.Service + once sync.Once + validate *validator.Validate +) + +// Initialize the database connection and validator +func init() { + once.Do(func() { + db = database.New(config.LoadConfig()) + validate = validator.New() + + // Register custom validations if needed + validate.RegisterValidation("retribusi_status", validateRetribusiStatus) + + if db == nil { + logger.Fatal("Failed to initialize database connection") + } + }) +} + +// Custom validation for retribusi status +func validateRetribusiStatus(fl validator.FieldLevel) bool { + return models.IsValidStatus(fl.Field().String()) +} + +// RetribusiHandler handles retribusi services +type RetribusiHandler struct { + db database.Service +} + +// NewRetribusiHandler creates a new RetribusiHandler +func NewRetribusiHandler() *RetribusiHandler { + return &RetribusiHandler{ + db: db, + } +} + +// GetRetribusi godoc +// @Summary Get retribusi with pagination and optional aggregation +// @Description Returns a paginated list of retribusis with 
optional summary statistics +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param limit query int false "Limit (max 100)" default(10) +// @Param offset query int false "Offset" default(0) +// @Param include_summary query bool false "Include aggregation summary" default(false) +// @Param status query string false "Filter by status" +// @Param jenis query string false "Filter by jenis" +// @Param dinas query string false "Filter by dinas" +// @Param search query string false "Search in multiple fields" +// @Success 200 {object} retribusi.RetribusiGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusis [get] +func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { + // Parse pagination parameters + limit, offset, err := h.parsePaginationParams(c) + if err != nil { + h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest) + return + } + + // Parse filter parameters + filter := h.parseFilterParams(c) + includeAggregation := c.Query("include_summary") == "true" + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute concurrent operations + var ( + retribusis []retribusi.Retribusi + total int + aggregateData *models.AggregateData + wg sync.WaitGroup + errChan = make(chan error, 3) + mu sync.Mutex + ) + + // Fetch total count + wg.Add(1) + go func() { + defer wg.Done() + if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil { + mu.Lock() + errChan <- fmt.Errorf("failed to get total count: %w", err) + mu.Unlock() + } + }() + + // Fetch main data + wg.Add(1) + go func() { + defer wg.Done() + result, err := 
h.fetchRetribusis(ctx, dbConn, filter, limit, offset) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to fetch data: %w", err) + } else { + retribusis = result + } + mu.Unlock() + }() + + // Fetch aggregation data if requested + if includeAggregation { + wg.Add(1) + go func() { + defer wg.Done() + result, err := h.getAggregateData(ctx, dbConn, filter) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to get aggregate data: %w", err) + } else { + aggregateData = result + } + mu.Unlock() + }() + } + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(limit, offset, total) + response := retribusi.RetribusiGetResponse{ + Message: "Data retribusi berhasil diambil", + Data: retribusis, + Meta: meta, + } + + if includeAggregation && aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) +} + +// GetRetribusiByID godoc +// @Summary Get Retribusi by ID +// @Description Returns a single retribusi by ID +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param id path string true "Retribusi ID (UUID)" +// @Success 200 {object} retribusi.RetribusiGetByIDResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "Retribusi not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusi/{id} [get] +func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection 
failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + dataretribusi, err := h.getRetribusiByID(ctx, dbConn, id) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "Retribusi not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to get retribusi", err, http.StatusInternalServerError) + } + return + } + + response := retribusi.RetribusiGetByIDResponse{ + Message: "Retribusi details retrieved successfully", + Data: dataretribusi, + } + + c.JSON(http.StatusOK, response) +} + +// GetRetribusiDynamic godoc +// @Summary Get retribusi with dynamic filtering +// @Description Returns retribusis with advanced dynamic filtering like Directus +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param fields query string false "Fields to select (e.g., fields=*.*)" +// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[Jenis][_eq]=value)" +// @Param sort query string false "Sort fields (e.g., sort=date_created,-Jenis)" +// @Param limit query int false "Limit" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} retribusi.RetribusiGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusis/dynamic [get] +func (h *RetribusiHandler) GetRetribusiDynamic(c *gin.Context) { + // Parse query parameters + parser := utils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) + if err != nil { + h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) + return + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create 
context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query with dynamic filtering + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) + response := retribusi.RetribusiGetResponse{ + Message: "Data retribusi berhasil diambil", + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +} + +// fetchRetribusisDynamic executes dynamic query +func (h *RetribusiHandler) fetchRetribusisDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]retribusi.Retribusi, int, error) { + // Setup query builder + countBuilder := utils.NewQueryBuilder("data_retribusi"). + SetColumnMapping(map[string]string{ + "jenis": "Jenis", + "pelayanan": "Pelayanan", + "dinas": "Dinas", + "kelompok_obyek": "Kelompok_obyek", + "Kode_tarif": "Kode_tarif", + "kode_tarif": "Kode_tarif", + "tarif": "Tarif", + "satuan": "Satuan", + "tarif_overtime": "Tarif_overtime", + "satuan_overtime": "Satuan_overtime", + "rekening_pokok": "Rekening_pokok", + "rekening_denda": "Rekening_denda", + "uraian_1": "Uraian_1", + "uraian_2": "Uraian_2", + "uraian_3": "Uraian_3", + }). + SetAllowedColumns([]string{ + "id", "status", "sort", "user_created", "date_created", + "user_updated", "date_updated", "Jenis", "Pelayanan", + "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", + "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", + "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }) + + mainBuilder := utils.NewQueryBuilder("data_retribusi"). 
+ SetColumnMapping(map[string]string{ + "jenis": "Jenis", + "pelayanan": "Pelayanan", + "dinas": "Dinas", + "kelompok_obyek": "Kelompok_obyek", + "Kode_tarif": "Kode_tarif", + "kode_tarif": "Kode_tarif", + "tarif": "Tarif", + "satuan": "Satuan", + "tarif_overtime": "Tarif_overtime", + "satuan_overtime": "Satuan_overtime", + "rekening_pokok": "Rekening_pokok", + "rekening_denda": "Rekening_denda", + "uraian_1": "Uraian_1", + "uraian_2": "Uraian_2", + "uraian_3": "Uraian_3", + }). + SetAllowedColumns([]string{ + "id", "status", "sort", "user_created", "date_created", + "user_updated", "date_updated", "Jenis", "Pelayanan", + "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", + "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", + "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }) + + // Add default filter to exclude deleted records + if len(query.Filters) > 0 { + query.Filters = append([]utils.FilterGroup{{ + Filters: []utils.DynamicFilter{{ + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }}, + LogicOp: "AND", + }}, query.Filters...) + } else { + query.Filters = []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{{ + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }}, + LogicOp: "AND", + }} + } + + // Execute queries sequentially to avoid race conditions + var total int + var retribusis []retribusi.Retribusi + + // 1. Get total count first + countQuery := query + countQuery.Limit = 0 + countQuery.Offset = 0 + + countSQL, countArgs, err := countBuilder.BuildCountQuery(countQuery) + if err != nil { + return nil, 0, fmt.Errorf("failed to build count query: %w", err) + } + + if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { + return nil, 0, fmt.Errorf("failed to get total count: %w", err) + } + + // 2. 
Get main data + mainSQL, mainArgs, err := mainBuilder.BuildQuery(query) + if err != nil { + return nil, 0, fmt.Errorf("failed to build main query: %w", err) + } + + rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) + if err != nil { + return nil, 0, fmt.Errorf("failed to execute main query: %w", err) + } + defer rows.Close() + + for rows.Next() { + retribusi, err := h.scanRetribusi(rows) + if err != nil { + return nil, 0, fmt.Errorf("failed to scan retribusi: %w", err) + } + retribusis = append(retribusis, retribusi) + } + + if err := rows.Err(); err != nil { + return nil, 0, fmt.Errorf("rows iteration error: %w", err) + } + + return retribusis, total, nil +} + +// SearchRetribusiAdvanced provides advanced search capabilities +func (h *RetribusiHandler) SearchRetribusiAdvanced(c *gin.Context) { + // Parse complex search parameters + searchQuery := c.Query("q") + if searchQuery == "" { + // If no search query provided, return all records with default sorting + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{}, // Empty filters - fetchRetribusisDynamic will add default deleted filter + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query to get all records + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) + if err != nil { + 
h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := retribusi.RetribusiGetResponse{ + Message: "All records retrieved (no search query provided)", + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) + return + } + + // Build dynamic query for search + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{ + { + Column: "Jenis", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Pelayanan", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Dinas", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Uraian_1", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + }, + LogicOp: "AND", + }}, + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute search + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := 
retribusi.RetribusiGetResponse{ + Message: fmt.Sprintf("Search results for '%s'", searchQuery), + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +} + +// CreateRetribusi godoc +// @Summary Create retribusi +// @Description Creates a new retribusi record +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param request body retribusi.RetribusiCreateRequest true "Retribusi creation request" +// @Success 201 {object} retribusi.RetribusiCreateResponse "Retribusi created successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusis [post] +func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { + var req retribusi.RetribusiCreateRequest + + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Validate request + if err := validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Validate duplicate and daily submission + if err := h.validateRetribusiSubmission(ctx, dbConn, &req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dataretribusi, err := h.createRetribusi(ctx, dbConn, &req) + if err != nil { + h.logAndRespondError(c, "Failed to create retribusi", err, http.StatusInternalServerError) + return + } + + response := retribusi.RetribusiCreateResponse{ + Message: "Retribusi berhasil dibuat", + Data: dataretribusi, + } + + c.JSON(http.StatusCreated, response) +} + +// UpdateRetribusi godoc +// @Summary Update retribusi +// @Description 
Updates an existing retribusi record +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param id path string true "Retribusi ID (UUID)" +// @Param request body retribusi.RetribusiUpdateRequest true "Retribusi update request" +// @Success 200 {object} retribusi.RetribusiUpdateResponse "Retribusi updated successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 404 {object} models.ErrorResponse "Retribusi not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusi/{id} [put] +func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + var req retribusi.RetribusiUpdateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Set ID from path parameter + req.ID = id + + // Validate request + if err := validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + dataretribusi, err := h.updateRetribusi(ctx, dbConn, &req) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "Retribusi not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to update retribusi", err, http.StatusInternalServerError) + } + return + } + + response := retribusi.RetribusiUpdateResponse{ + Message: "Retribusi berhasil diperbarui", + Data: dataretribusi, + } + + c.JSON(http.StatusOK, response) +} + +// DeleteRetribusi godoc +// @Summary Delete retribusi +// 
// @Description Soft deletes a retribusi by setting status to 'deleted'
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param id path string true "Retribusi ID (UUID)"
// @Success 200 {object} retribusi.RetribusiDeleteResponse "Retribusi deleted successfully"
// @Failure 400 {object} models.ErrorResponse "Invalid ID format"
// @Failure 404 {object} models.ErrorResponse "Retribusi not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusi/{id} [delete]
func (h *RetribusiHandler) DeleteRetribusi(c *gin.Context) {
	id := c.Param("id")

	// Validate UUID format before touching the database.
	if _, err := uuid.Parse(id); err != nil {
		h.respondError(c, "Invalid ID format", err, http.StatusBadRequest)
		return
	}

	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	err = h.deleteRetribusi(ctx, dbConn, id)
	if err != nil {
		// deleteRetribusi returns sql.ErrNoRows when no live row matched.
		if err == sql.ErrNoRows {
			h.respondError(c, "Retribusi not found", err, http.StatusNotFound)
		} else {
			h.logAndRespondError(c, "Failed to delete retribusi", err, http.StatusInternalServerError)
		}
		return
	}

	response := retribusi.RetribusiDeleteResponse{
		Message: "Retribusi berhasil dihapus",
		ID:      id,
	}

	c.JSON(http.StatusOK, response)
}

// GetRetribusiStats godoc
// @Summary Get retribusi statistics
// @Description Returns comprehensive statistics about retribusi data
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param status query string false "Filter statistics by status"
// @Success 200 {object} models.AggregateData "Statistics data"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusis/stats [get]
func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) {
	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	// Statistics honour the same query-string filters as the list endpoint.
	filter := h.parseFilterParams(c)
	aggregateData, err := h.getAggregateData(ctx, dbConn, filter)
	if err != nil {
		h.logAndRespondError(c, "Failed to get statistics", err, http.StatusInternalServerError)
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"message": "Statistik retribusi berhasil diambil",
		"data":    aggregateData,
	})
}

// getRetribusiByID fetches a single non-deleted retribusi row by primary key.
// Returns sql.ErrNoRows (from the row scan) when the id does not match a live row.
func (h *RetribusiHandler) getRetribusiByID(ctx context.Context, dbConn *sql.DB, id string) (*retribusi.Retribusi, error) {
	query := `
		SELECT
			id, status, sort, user_created, date_created, user_updated, date_updated,
			"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
			"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
			"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"
		FROM data_retribusi
		WHERE id = $1 AND status != 'deleted'`

	row := dbConn.QueryRowContext(ctx, query, id)

	// NOTE(review): the local variable shadows the imported `retribusi`
	// model package inside this function body; consider renaming.
	var retribusi retribusi.Retribusi
	err := row.Scan(
		&retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated,
		&retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated,
		&retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek,
		&retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime,
		&retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda,
		&retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3,
	)

	if err != nil {
		return nil, err
	}

	return &retribusi, nil
}

// createRetribusi inserts a new row (server-generated UUID + timestamps) and
// returns the full stored record via RETURNING.
func (h *RetribusiHandler) createRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) (*retribusi.Retribusi, error) {
	id := uuid.New().String()
	now := time.Now()

	query := `
		INSERT INTO data_retribusi (
			id, status, date_created, date_updated,
			"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
			"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
			"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"
		) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)
		RETURNING
			id, status, sort, user_created, date_created, user_updated, date_updated,
			"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
			"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
			"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"`

	// Argument order must match the placeholder order above exactly.
	row := dbConn.QueryRowContext(ctx, query,
		id, req.Status, now, now,
		req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif,
		req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime,
		req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3,
	)

	var retribusi retribusi.Retribusi
	err := row.Scan(
		&retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated,
		&retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated,
		&retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek,
		&retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime,
		&retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda,
		&retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3,
	)

	if err != nil {
		return nil, fmt.Errorf("failed to create retribusi: %w", err)
	}

	return &retribusi, nil
}

// updateRetribusi rewrites all mutable columns of a live row and returns the
// stored record. Scanning the RETURNING row yields sql.ErrNoRows when the id
// is unknown or the row is soft-deleted.
func (h *RetribusiHandler) updateRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiUpdateRequest) (*retribusi.Retribusi, error) {
	now := time.Now()

	query := `
		UPDATE data_retribusi SET
			status = $2, date_updated = $3,
			"Jenis" = $4, "Pelayanan" = $5, "Dinas" = $6, "Kelompok_obyek" = $7, "Kode_tarif" = $8,
			"Tarif" = $9, "Satuan" = $10, "Tarif_overtime" = $11, "Satuan_overtime" = $12,
			"Rekening_pokok" = $13, "Rekening_denda" = $14, "Uraian_1" = $15, "Uraian_2" = $16, "Uraian_3" = $17
		WHERE id = $1 AND status != 'deleted'
		RETURNING
			id, status, sort, user_created, date_created, user_updated, date_updated,
			"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
			"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
			"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"`

	row := dbConn.QueryRowContext(ctx, query,
		req.ID, req.Status, now,
		req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif,
		req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime,
		req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3,
	)

	var retribusi retribusi.Retribusi
	err := row.Scan(
		&retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated,
		&retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated,
		&retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek,
		&retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime,
		&retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda,
		&retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3,
	)

	if err != nil {
		return nil, fmt.Errorf("failed to update retribusi: %w", err)
	}

	return &retribusi, nil
}

// deleteRetribusi soft-deletes a row by flipping status to 'deleted'.
// Returns sql.ErrNoRows when no live row matched the id so handlers can map
// it to a 404.
func (h *RetribusiHandler) deleteRetribusi(ctx context.Context, dbConn *sql.DB, id string) error {
	now := time.Now()

	query := `UPDATE data_retribusi SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'`

	result, err := dbConn.ExecContext(ctx, query, id, now)
	if err != nil {
		return fmt.Errorf("failed to delete retribusi: %w", err)
	}

	rowsAffected, err := result.RowsAffected()
	if err != nil {
		return fmt.Errorf("failed to get affected rows: %w", err)
	}

	if rowsAffected == 0 {
		return sql.ErrNoRows
	}

	return nil
}

+// Enhanced error handling +func (h *RetribusiHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + logger.Error(message, map[string]interface{}{ + "error": err.Error(), + "status_code": statusCode, + }) + h.respondError(c, message, err, statusCode) +} + +func (h *RetribusiHandler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + + c.JSON(statusCode, models.ErrorResponse{ + Error: errorMessage, + Code: statusCode, + Message: err.Error(), + Timestamp: time.Now(), + }) +} + +// Parse pagination parameters dengan validation yang lebih ketat +func (h *RetribusiHandler) parsePaginationParams(c *gin.Context) (int, int, error) { + limit := 10 // Default limit + offset := 0 // Default offset + + if limitStr := c.Query("limit"); limitStr != "" { + parsedLimit, err := strconv.Atoi(limitStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) + } + if parsedLimit <= 0 { + return 0, 0, fmt.Errorf("limit must be greater than 0") + } + if parsedLimit > 100 { + return 0, 0, fmt.Errorf("limit cannot exceed 100") + } + limit = parsedLimit + } + + if offsetStr := c.Query("offset"); offsetStr != "" { + parsedOffset, err := strconv.Atoi(offsetStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) + } + if parsedOffset < 0 { + return 0, 0, fmt.Errorf("offset cannot be negative") + } + offset = parsedOffset + } + + logger.Debug("Pagination parameters", map[string]interface{}{ + "limit": limit, + "offset": offset, + }) + return limit, offset, nil +} + +// Build WHERE clause dengan filter parameters +func (h *RetribusiHandler) buildWhereClause(filter retribusi.RetribusiFilter) (string, []interface{}) { + conditions := []string{"status != 'deleted'"} + args := []interface{}{} + paramCount := 1 + + if filter.Status != nil { + conditions = 
append(conditions, fmt.Sprintf("status = $%d", paramCount)) + args = append(args, *filter.Status) + paramCount++ + } + + if filter.Jenis != nil { + conditions = append(conditions, fmt.Sprintf(`"Jenis" ILIKE $%d`, paramCount)) + args = append(args, "%"+*filter.Jenis+"%") + paramCount++ + } + + if filter.Dinas != nil { + conditions = append(conditions, fmt.Sprintf(`"Dinas" ILIKE $%d`, paramCount)) + args = append(args, "%"+*filter.Dinas+"%") + paramCount++ + } + + if filter.KelompokObyek != nil { + conditions = append(conditions, fmt.Sprintf(`"Kelompok_obyek" ILIKE $%d`, paramCount)) + args = append(args, "%"+*filter.KelompokObyek+"%") + paramCount++ + } + + if filter.Search != nil { + searchCondition := fmt.Sprintf(`( + "Jenis" ILIKE $%d OR + "Pelayanan" ILIKE $%d OR + "Dinas" ILIKE $%d OR + "Kode_tarif" ILIKE $%d OR + "Uraian_1" ILIKE $%d OR + "Uraian_2" ILIKE $%d OR + "Uraian_3" ILIKE $%d + )`, paramCount, paramCount, paramCount, paramCount, paramCount, paramCount, paramCount) + conditions = append(conditions, searchCondition) + searchTerm := "%" + *filter.Search + "%" + args = append(args, searchTerm) + paramCount++ + } + + if filter.DateFrom != nil { + conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) + args = append(args, *filter.DateFrom) + paramCount++ + } + + if filter.DateTo != nil { + conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) + args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) // End of day + paramCount++ + } + + return strings.Join(conditions, " AND "), args +} + +// Optimized scanning function yang menggunakan sql.Null* types langsung +func (h *RetribusiHandler) scanRetribusi(rows *sql.Rows) (retribusi.Retribusi, error) { + var retribusi retribusi.Retribusi + + return retribusi, rows.Scan( + &retribusi.ID, + &retribusi.Status, + &retribusi.Sort, + &retribusi.UserCreated, + &retribusi.DateCreated, + &retribusi.UserUpdated, + &retribusi.DateUpdated, + &retribusi.Jenis, + 
&retribusi.Pelayanan, + &retribusi.Dinas, + &retribusi.KelompokObyek, + &retribusi.KodeTarif, + &retribusi.Tarif, + &retribusi.Satuan, + &retribusi.TarifOvertime, + &retribusi.SatuanOvertime, + &retribusi.RekeningPokok, + &retribusi.RekeningDenda, + &retribusi.Uraian1, + &retribusi.Uraian2, + &retribusi.Uraian3, + ) +} + +// Parse filter parameters dari query string +func (h *RetribusiHandler) parseFilterParams(c *gin.Context) retribusi.RetribusiFilter { + filter := retribusi.RetribusiFilter{} + + if status := c.Query("status"); status != "" { + if models.IsValidStatus(status) { + filter.Status = &status + } + } + + if jenis := c.Query("jenis"); jenis != "" { + filter.Jenis = &jenis + } + + if dinas := c.Query("dinas"); dinas != "" { + filter.Dinas = &dinas + } + + if kelompokObyek := c.Query("kelompok_obyek"); kelompokObyek != "" { + filter.KelompokObyek = &kelompokObyek + } + + if search := c.Query("search"); search != "" { + filter.Search = &search + } + + // Parse date filters + if dateFromStr := c.Query("date_from"); dateFromStr != "" { + if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { + filter.DateFrom = &dateFrom + } + } + + if dateToStr := c.Query("date_to"); dateToStr != "" { + if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { + filter.DateTo = &dateTo + } + } + + return filter +} + +// Get comprehensive aggregate data dengan filter support +func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter) (*models.AggregateData, error) { + aggregate := &models.AggregateData{ + ByStatus: make(map[string]int), + ByDinas: make(map[string]int), + ByJenis: make(map[string]int), + } + + // Build where clause untuk filter + whereClause, args := h.buildWhereClause(filter) + + // Use concurrent execution untuk performance + var wg sync.WaitGroup + var mu sync.Mutex + errChan := make(chan error, 4) + + // 1. 
Count by status + wg.Add(1) + go func() { + defer wg.Done() + statusQuery := fmt.Sprintf(` + SELECT status, COUNT(*) + FROM data_retribusi + WHERE %s + GROUP BY status + ORDER BY status`, whereClause) + + rows, err := dbConn.QueryContext(ctx, statusQuery, args...) + if err != nil { + errChan <- fmt.Errorf("status query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var status string + var count int + if err := rows.Scan(&status, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("status scan failed: %w", err) + return + } + aggregate.ByStatus[status] = count + switch status { + case "active": + aggregate.TotalActive = count + case "draft": + aggregate.TotalDraft = count + case "inactive": + aggregate.TotalInactive = count + } + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("status iteration error: %w", err) + } + }() + + // 2. Count by Dinas + wg.Add(1) + go func() { + defer wg.Done() + dinasQuery := fmt.Sprintf(` + SELECT COALESCE("Dinas", 'Unknown') as dinas, COUNT(*) + FROM data_retribusi + WHERE %s AND "Dinas" IS NOT NULL AND TRIM("Dinas") != '' + GROUP BY "Dinas" + ORDER BY COUNT(*) DESC + LIMIT 10`, whereClause) + + rows, err := dbConn.QueryContext(ctx, dinasQuery, args...) + if err != nil { + errChan <- fmt.Errorf("dinas query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var dinas string + var count int + if err := rows.Scan(&dinas, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("dinas scan failed: %w", err) + return + } + aggregate.ByDinas[dinas] = count + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("dinas iteration error: %w", err) + } + }() + + // 3. 
Count by Jenis + wg.Add(1) + go func() { + defer wg.Done() + jenisQuery := fmt.Sprintf(` + SELECT COALESCE("Jenis", 'Unknown') as jenis, COUNT(*) + FROM data_retribusi + WHERE %s AND "Jenis" IS NOT NULL AND TRIM("Jenis") != '' + GROUP BY "Jenis" + ORDER BY COUNT(*) DESC + LIMIT 10`, whereClause) + + rows, err := dbConn.QueryContext(ctx, jenisQuery, args...) + if err != nil { + errChan <- fmt.Errorf("jenis query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var jenis string + var count int + if err := rows.Scan(&jenis, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("jenis scan failed: %w", err) + return + } + aggregate.ByJenis[jenis] = count + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("jenis iteration error: %w", err) + } + }() + + // 4. Get last updated time dan today statistics + wg.Add(1) + go func() { + defer wg.Done() + + // Last updated + lastUpdatedQuery := fmt.Sprintf(` + SELECT MAX(date_updated) + FROM data_retribusi + WHERE %s AND date_updated IS NOT NULL`, whereClause) + + var lastUpdated sql.NullTime + if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { + errChan <- fmt.Errorf("last updated query failed: %w", err) + return + } + + // Today statistics + today := time.Now().Format("2006-01-02") + todayStatsQuery := fmt.Sprintf(` + SELECT + SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, + SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today + FROM data_retribusi + WHERE %s`, len(args)+1, len(args)+1, len(args)+1, whereClause) + + todayArgs := append(args, today) + var createdToday, updatedToday int + if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { + errChan <- fmt.Errorf("today stats query failed: %w", err) + return + } + + mu.Lock() + if lastUpdated.Valid { + 
aggregate.LastUpdated = &lastUpdated.Time + } + aggregate.CreatedToday = createdToday + aggregate.UpdatedToday = updatedToday + mu.Unlock() + }() + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, err + } + } + + return aggregate, nil +} + +// Get total count dengan filter support +func (h *RetribusiHandler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, total *int) error { + whereClause, args := h.buildWhereClause(filter) + countQuery := fmt.Sprintf(`SELECT COUNT(*) FROM data_retribusi WHERE %s`, whereClause) + + if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil { + return fmt.Errorf("total count query failed: %w", err) + } + + return nil +} + +// Enhanced fetchRetribusis dengan filter support +func (h *RetribusiHandler) fetchRetribusis(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, limit, offset int) ([]retribusi.Retribusi, error) { + whereClause, args := h.buildWhereClause(filter) + + // Build the main query with pagination + query := fmt.Sprintf(` + SELECT + id, status, sort, user_created, date_created, user_updated, date_updated, + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" + FROM data_retribusi + WHERE %s + ORDER BY date_created DESC NULLS LAST + LIMIT $%d OFFSET $%d`, + whereClause, len(args)+1, len(args)+2) + + // Add pagination parameters + args = append(args, limit, offset) + + rows, err := dbConn.QueryContext(ctx, query, args...) 
+ if err != nil { + return nil, fmt.Errorf("fetch retribusis query failed: %w", err) + } + defer rows.Close() + + // Pre-allocate slice dengan kapasitas yang tepat + retribusis := make([]retribusi.Retribusi, 0, limit) + + for rows.Next() { + retribusi, err := h.scanRetribusi(rows) + if err != nil { + return nil, fmt.Errorf("scan retribusi failed: %w", err) + } + retribusis = append(retribusis, retribusi) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("rows iteration error: %w", err) + } + + logger.Info("Successfully fetched retribusis", map[string]interface{}{ + "count": len(retribusis), + "limit": limit, + "offset": offset, + }) + return retribusis, nil +} + +// Calculate pagination metadata +func (h *RetribusiHandler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages := 0 + currentPage := 1 + + if limit > 0 { + totalPages = (total + limit - 1) / limit // Ceiling division + currentPage = (offset / limit) + 1 + } + + return models.MetaResponse{ + Limit: limit, + Offset: offset, + Total: total, + TotalPages: totalPages, + CurrentPage: currentPage, + HasNext: offset+limit < total, + HasPrev: offset > 0, + } +} + +// validateRetribusiSubmission performs validation for duplicate entries and daily submission limits +func (h *RetribusiHandler) validateRetribusiSubmission(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { + // Import the validation utility + validator := validation.NewDuplicateValidator(dbConn) + + // Use default retribusi configuration + config := validation.DefaultRetribusiConfig() + + // Validate duplicate entries with active status for today + err := validator.ValidateDuplicate(ctx, config, "dummy_id") + if err != nil { + return fmt.Errorf("validation failed: %w", err) + } + + // Validate once per day submission + err = validator.ValidateOncePerDay(ctx, "data_retribusi", "id", "date_created", "daily_limit") + if err != nil { + return fmt.Errorf("daily submission limit 
exceeded: %w", err) + } + + return nil +} + +// Example usage of the validation utility with custom configuration +func (h *RetribusiHandler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { + // Create validator instance + validator := validation.NewDuplicateValidator(dbConn) + + // Use custom configuration + config := validation.ValidationConfig{ + TableName: "data_retribusi", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + AdditionalFields: map[string]interface{}{ + "jenis": req.Jenis, + "dinas": req.Dinas, + }, + } + + // Validate with custom fields + fields := map[string]interface{}{ + "jenis": *req.Jenis, + "dinas": *req.Dinas, + } + + err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) + if err != nil { + return fmt.Errorf("custom validation failed: %w", err) + } + + return nil +} + +// GetLastSubmissionTime example +func (h *RetribusiHandler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) { + validator := validation.NewDuplicateValidator(dbConn) + return validator.GetLastSubmissionTime(ctx, "data_retribusi", "id", "date_created", identifier) +} diff --git a/internal/middleware/auth_middleware.go b/internal/middleware/auth_middleware.go new file mode 100644 index 0000000..1d3969c --- /dev/null +++ b/internal/middleware/auth_middleware.go @@ -0,0 +1,59 @@ +package middleware + +import ( + "fmt" + "net/http" + + "api-service/internal/config" + + "github.com/gin-gonic/gin" +) + +// ConfigurableAuthMiddleware provides flexible authentication based on configuration +func ConfigurableAuthMiddleware(cfg *config.Config) gin.HandlerFunc { + return func(c *gin.Context) { + // Skip authentication for development/testing if explicitly disabled + if !cfg.Keycloak.Enabled { + fmt.Println("Authentication is disabled - allowing all requests") + c.Next() + return + 
} + + // Use Keycloak authentication when enabled + AuthMiddleware()(c) + } +} + +// StrictAuthMiddleware enforces authentication regardless of Keycloak.Enabled setting +func StrictAuthMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + if appConfig == nil { + fmt.Println("AuthMiddleware: Config not initialized") + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) + return + } + + // Always enforce authentication + AuthMiddleware()(c) + } +} + +// OptionalKeycloakAuthMiddleware allows requests but adds authentication info if available +func OptionalKeycloakAuthMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + if appConfig == nil || !appConfig.Keycloak.Enabled { + c.Next() + return + } + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + // No token provided, but continue + c.Next() + return + } + + // Try to validate token, but don't fail if invalid + AuthMiddleware()(c) + } +} diff --git a/internal/middleware/error_handler.go b/internal/middleware/error_handler.go new file mode 100644 index 0000000..7f6ab82 --- /dev/null +++ b/internal/middleware/error_handler.go @@ -0,0 +1,54 @@ +package middleware + +import ( + models "api-service/internal/models" + "net/http" + + "github.com/gin-gonic/gin" +) + +// ErrorHandler handles errors globally +func ErrorHandler() gin.HandlerFunc { + return func(c *gin.Context) { + c.Next() + + if len(c.Errors) > 0 { + err := c.Errors.Last() + status := http.StatusInternalServerError + + // Determine status code based on error type + switch err.Type { + case gin.ErrorTypeBind: + status = http.StatusBadRequest + case gin.ErrorTypeRender: + status = http.StatusUnprocessableEntity + case gin.ErrorTypePrivate: + status = http.StatusInternalServerError + } + + response := models.ErrorResponse{ + Error: "internal_error", + Message: err.Error(), + Code: status, + } + + c.JSON(status, response) + } + } +} + +// CORS middleware 
configuration +func CORSConfig() gin.HandlerFunc { + return gin.HandlerFunc(func(c *gin.Context) { + c.Header("Access-Control-Allow-Origin", "*") + c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS, PATCH") + c.Header("Access-Control-Allow-Headers", "Origin, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization") + + if c.Request.Method == "OPTIONS" { + c.AbortWithStatus(204) + return + } + + c.Next() + }) +} diff --git a/internal/middleware/jwt_middleware.go b/internal/middleware/jwt_middleware.go new file mode 100644 index 0000000..708ef7f --- /dev/null +++ b/internal/middleware/jwt_middleware.go @@ -0,0 +1,77 @@ +package middleware + +import ( + services "api-service/internal/services/auth" + "net/http" + "strings" + + "github.com/gin-gonic/gin" +) + +// JWTAuthMiddleware validates JWT tokens generated by our auth service +func JWTAuthMiddleware(authService *services.AuthService) gin.HandlerFunc { + return func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + tokenString := parts[1] + + // Validate token + claims, err := authService.ValidateToken(tokenString) + if err != nil { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + // Set user info in context + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + + c.Next() + } +} + +// OptionalAuthMiddleware allows both authenticated and unauthenticated requests +func OptionalAuthMiddleware(authService *services.AuthService) gin.HandlerFunc { + return func(c 
*gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + // No token provided, but continue + c.Next() + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.Next() + return + } + + tokenString := parts[1] + claims, err := authService.ValidateToken(tokenString) + if err != nil { + // Invalid token, but continue (don't abort) + c.Next() + return + } + + // Set user info in context + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + + c.Next() + } +} diff --git a/internal/middleware/keycloak_middleware.go b/internal/middleware/keycloak_middleware.go new file mode 100644 index 0000000..a336154 --- /dev/null +++ b/internal/middleware/keycloak_middleware.go @@ -0,0 +1,254 @@ +package middleware + +/** Keycloak Auth Middleware **/ +import ( + "crypto/rsa" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "math/big" + "net/http" + "strings" + "sync" + "time" + + "api-service/internal/config" + + "github.com/gin-gonic/gin" + "github.com/golang-jwt/jwt/v5" + "golang.org/x/sync/singleflight" +) + +var ( + ErrInvalidToken = errors.New("invalid token") +) + +// JwksCache caches JWKS keys with expiration +type JwksCache struct { + mu sync.RWMutex + keys map[string]*rsa.PublicKey + expiresAt time.Time + sfGroup singleflight.Group + config *config.Config +} + +func NewJwksCache(cfg *config.Config) *JwksCache { + return &JwksCache{ + keys: make(map[string]*rsa.PublicKey), + config: cfg, + } +} + +func (c *JwksCache) GetKey(kid string) (*rsa.PublicKey, error) { + c.mu.RLock() + if key, ok := c.keys[kid]; ok && time.Now().Before(c.expiresAt) { + c.mu.RUnlock() + return key, nil + } + c.mu.RUnlock() + + // Fetch keys with singleflight to avoid concurrent fetches + v, err, _ := c.sfGroup.Do("fetch_jwks", func() (interface{}, error) { + return c.fetchKeys() + }) + if err != nil { + return nil, 
err + } + + keys := v.(map[string]*rsa.PublicKey) + + c.mu.Lock() + c.keys = keys + c.expiresAt = time.Now().Add(1 * time.Hour) // cache for 1 hour + c.mu.Unlock() + + key, ok := keys[kid] + if !ok { + return nil, fmt.Errorf("key with kid %s not found", kid) + } + return key, nil +} + +func (c *JwksCache) fetchKeys() (map[string]*rsa.PublicKey, error) { + if !c.config.Keycloak.Enabled { + return nil, fmt.Errorf("keycloak authentication is disabled") + } + + jwksURL := c.config.Keycloak.JwksURL + if jwksURL == "" { + // Construct JWKS URL from issuer if not explicitly provided + jwksURL = c.config.Keycloak.Issuer + "/protocol/openid-connect/certs" + } + + resp, err := http.Get(jwksURL) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + var jwksData struct { + Keys []struct { + Kid string `json:"kid"` + Kty string `json:"kty"` + N string `json:"n"` + E string `json:"e"` + } `json:"keys"` + } + + if err := json.NewDecoder(resp.Body).Decode(&jwksData); err != nil { + return nil, err + } + + keys := make(map[string]*rsa.PublicKey) + for _, key := range jwksData.Keys { + if key.Kty != "RSA" { + continue + } + pubKey, err := parseRSAPublicKey(key.N, key.E) + if err != nil { + continue + } + keys[key.Kid] = pubKey + } + return keys, nil +} + +// parseRSAPublicKey parses RSA public key components from base64url strings +func parseRSAPublicKey(nStr, eStr string) (*rsa.PublicKey, error) { + nBytes, err := base64UrlDecode(nStr) + if err != nil { + return nil, err + } + eBytes, err := base64UrlDecode(eStr) + if err != nil { + return nil, err + } + + var eInt int + for _, b := range eBytes { + eInt = eInt<<8 + int(b) + } + + pubKey := &rsa.PublicKey{ + N: new(big.Int).SetBytes(nBytes), + E: eInt, + } + return pubKey, nil +} + +func base64UrlDecode(s string) ([]byte, error) { + // Add padding if missing + if m := len(s) % 4; m != 0 { + s += strings.Repeat("=", 4-m) + } + return base64.URLEncoding.DecodeString(s) +} + +// Global config instance +var appConfig 
*config.Config +var jwksCacheInstance *JwksCache + +// InitializeAuth initializes the auth middleware with config +func InitializeAuth(cfg *config.Config) { + appConfig = cfg + jwksCacheInstance = NewJwksCache(cfg) +} + +// AuthMiddleware validates Bearer token as Keycloak JWT token +func AuthMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + if appConfig == nil { + fmt.Println("AuthMiddleware: Config not initialized") + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) + return + } + + if !appConfig.Keycloak.Enabled { + // Skip authentication if Keycloak is disabled but log for debugging + fmt.Println("AuthMiddleware: Keycloak authentication is disabled - allowing all requests") + c.Next() + return + } + + fmt.Println("AuthMiddleware: Checking Authorization header") // Debug log + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + fmt.Println("AuthMiddleware: Authorization header missing") // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + fmt.Println("AuthMiddleware: Invalid Authorization header format") // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + tokenString := parts[1] + + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { + // Verify signing method + if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { + fmt.Printf("AuthMiddleware: Unexpected signing method: %v\n", token.Header["alg"]) // Debug log + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + + kid, ok := token.Header["kid"].(string) + if !ok { + fmt.Println("AuthMiddleware: kid header not found") // Debug log + return nil, errors.New("kid header not found") 
+ } + + return jwksCacheInstance.GetKey(kid) + }, jwt.WithIssuer(appConfig.Keycloak.Issuer), jwt.WithAudience(appConfig.Keycloak.Audience)) + + if err != nil || !token.Valid { + fmt.Printf("AuthMiddleware: Invalid or expired token: %v\n", err) // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid or expired token"}) + return + } + + fmt.Println("AuthMiddleware: Token valid, proceeding") // Debug log + // Token is valid, proceed + c.Next() + } +} + +/** JWT Bearer authentication middleware */ +// import ( +// "net/http" +// "strings" + +// "github.com/gin-gonic/gin" +// ) + +// AuthMiddleware validates Bearer token in Authorization header +func AuthJWTMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + token := parts[1] + // For now, use a static token for validation. Replace with your logic. 
+ const validToken = "your-static-token" + + if token != validToken { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"}) + return + } + + c.Next() + } +} diff --git a/internal/models/auth/auth.go b/internal/models/auth/auth.go new file mode 100644 index 0000000..872b45a --- /dev/null +++ b/internal/models/auth/auth.go @@ -0,0 +1,31 @@ +package models + +// LoginRequest represents the login request payload +type LoginRequest struct { + Username string `json:"username" binding:"required"` + Password string `json:"password" binding:"required"` +} + +// TokenResponse represents the token response +type TokenResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresIn int64 `json:"expires_in"` +} + +// JWTClaims represents the JWT claims +type JWTClaims struct { + UserID string `json:"user_id"` + Username string `json:"username"` + Email string `json:"email"` + Role string `json:"role"` +} + +// User represents a user for authentication +type User struct { + ID string `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + Password string `json:"-"` + Role string `json:"role"` +} diff --git a/internal/models/models.go b/internal/models/models.go new file mode 100644 index 0000000..2643ef8 --- /dev/null +++ b/internal/models/models.go @@ -0,0 +1,221 @@ +package models + +import ( + "database/sql" + "database/sql/driver" + "net/http" + "strconv" + "time" +) + +// NullableInt32 - your existing implementation +type NullableInt32 struct { + Int32 int32 `json:"int32,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableInt32 +func (n *NullableInt32) Scan(value interface{}) error { + var ni sql.NullInt32 + if err := ni.Scan(value); err != nil { + return err + } + n.Int32 = ni.Int32 + n.Valid = ni.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableInt32 +func (n NullableInt32) Value() 
(driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Int32, nil +} + +// NullableString provides consistent nullable string handling +type NullableString struct { + String string `json:"string,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableString +func (n *NullableString) Scan(value interface{}) error { + var ns sql.NullString + if err := ns.Scan(value); err != nil { + return err + } + n.String = ns.String + n.Valid = ns.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableString +func (n NullableString) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.String, nil +} + +// NullableTime provides consistent nullable time handling +type NullableTime struct { + Time time.Time `json:"time,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableTime +func (n *NullableTime) Scan(value interface{}) error { + var nt sql.NullTime + if err := nt.Scan(value); err != nil { + return err + } + n.Time = nt.Time + n.Valid = nt.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableTime +func (n NullableTime) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Time, nil +} + +// Metadata untuk pagination - dioptimalkan +type MetaResponse struct { + Limit int `json:"limit"` + Offset int `json:"offset"` + Total int `json:"total"` + TotalPages int `json:"total_pages"` + CurrentPage int `json:"current_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_prev"` +} + +// Aggregate data untuk summary +type AggregateData struct { + TotalActive int `json:"total_active"` + TotalDraft int `json:"total_draft"` + TotalInactive int `json:"total_inactive"` + ByStatus map[string]int `json:"by_status"` + ByDinas map[string]int `json:"by_dinas,omitempty"` + ByJenis map[string]int `json:"by_jenis,omitempty"` + LastUpdated *time.Time 
`json:"last_updated,omitempty"` + CreatedToday int `json:"created_today"` + UpdatedToday int `json:"updated_today"` +} + +// Error response yang konsisten +type ErrorResponse struct { + Error string `json:"error"` + Code int `json:"code"` + Message string `json:"message"` + Timestamp time.Time `json:"timestamp"` +} + +// BaseRequest contains common fields for all BPJS requests +type BaseRequest struct { + RequestID string `json:"request_id,omitempty"` + Timestamp time.Time `json:"timestamp,omitempty"` +} + +// BaseResponse contains common response fields +type BaseResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + RequestID string `json:"request_id,omitempty"` + Timestamp string `json:"timestamp,omitempty"` +} + +// ErrorResponse represents error response structure +type ErrorResponseBpjs struct { + Status string `json:"status"` + Message string `json:"message"` + RequestID string `json:"request_id,omitempty"` + Errors map[string]interface{} `json:"errors,omitempty"` + Code string `json:"code,omitempty"` +} + +// PaginationRequest contains pagination parameters +type PaginationRequest struct { + Page int `json:"page" validate:"min=1"` + Limit int `json:"limit" validate:"min=1,max=100"` + SortBy string `json:"sort_by,omitempty"` + SortDir string `json:"sort_dir,omitempty" validate:"omitempty,oneof=asc desc"` +} + +// PaginationResponse contains pagination metadata +type PaginationResponse struct { + CurrentPage int `json:"current_page"` + TotalPages int `json:"total_pages"` + TotalItems int64 `json:"total_items"` + ItemsPerPage int `json:"items_per_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_previous"` +} + +// MetaInfo contains additional metadata +type MetaInfo struct { + Version string `json:"version"` + Environment string `json:"environment"` + ServerTime string `json:"server_time"` +} + +func GetStatusCodeFromMeta(metaCode interface{}) int { + statusCode := http.StatusOK + + if metaCode != nil { 
+ switch v := metaCode.(type) { + case string: + if code, err := strconv.Atoi(v); err == nil { + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + } else { + statusCode = http.StatusInternalServerError + } + case int: + if v >= 100 && v <= 599 { + statusCode = v + } else { + statusCode = http.StatusInternalServerError + } + case float64: + code := int(v) + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + default: + statusCode = http.StatusInternalServerError + } + } + + return statusCode +} + +// Validation constants +const ( + StatusDraft = "draft" + StatusActive = "active" + StatusInactive = "inactive" + StatusDeleted = "deleted" +) + +// ValidStatuses untuk validasi +var ValidStatuses = []string{StatusDraft, StatusActive, StatusInactive} + +// IsValidStatus helper function +func IsValidStatus(status string) bool { + for _, validStatus := range ValidStatuses { + if status == validStatus { + return true + } + } + return false +} diff --git a/internal/models/retribusi/retribusi.go b/internal/models/retribusi/retribusi.go new file mode 100644 index 0000000..7907527 --- /dev/null +++ b/internal/models/retribusi/retribusi.go @@ -0,0 +1,228 @@ +package retribusi + +import ( + "api-service/internal/models" + "encoding/json" + "time" +) + +// Retribusi represents the data structure for the retribusi table +// with proper null handling and optimized JSON marshaling +type Retribusi struct { + ID string `json:"id" db:"id"` + Status string `json:"status" db:"status"` + Sort models.NullableInt32 `json:"sort,omitempty" db:"sort"` + UserCreated models.NullableString `json:"user_created,omitempty" db:"user_created"` + DateCreated models.NullableTime `json:"date_created,omitempty" db:"date_created"` + UserUpdated models.NullableString `json:"user_updated,omitempty" db:"user_updated"` + DateUpdated models.NullableTime `json:"date_updated,omitempty" 
db:"date_updated"` + Jenis models.NullableString `json:"jenis,omitempty" db:"Jenis"` + Pelayanan models.NullableString `json:"pelayanan,omitempty" db:"Pelayanan"` + Dinas models.NullableString `json:"dinas,omitempty" db:"Dinas"` + KelompokObyek models.NullableString `json:"kelompok_obyek,omitempty" db:"Kelompok_obyek"` + KodeTarif models.NullableString `json:"kode_tarif,omitempty" db:"Kode_tarif"` + Tarif models.NullableString `json:"tarif,omitempty" db:"Tarif"` + Satuan models.NullableString `json:"satuan,omitempty" db:"Satuan"` + TarifOvertime models.NullableString `json:"tarif_overtime,omitempty" db:"Tarif_overtime"` + SatuanOvertime models.NullableString `json:"satuan_overtime,omitempty" db:"Satuan_overtime"` + RekeningPokok models.NullableString `json:"rekening_pokok,omitempty" db:"Rekening_pokok"` + RekeningDenda models.NullableString `json:"rekening_denda,omitempty" db:"Rekening_denda"` + Uraian1 models.NullableString `json:"uraian_1,omitempty" db:"Uraian_1"` + Uraian2 models.NullableString `json:"uraian_2,omitempty" db:"Uraian_2"` + Uraian3 models.NullableString `json:"uraian_3,omitempty" db:"Uraian_3"` +} + +// Custom JSON marshaling untuk Retribusi agar NULL values tidak muncul di response +func (r Retribusi) MarshalJSON() ([]byte, error) { + type Alias Retribusi + aux := &struct { + Sort *int `json:"sort,omitempty"` + UserCreated *string `json:"user_created,omitempty"` + DateCreated *time.Time `json:"date_created,omitempty"` + UserUpdated *string `json:"user_updated,omitempty"` + DateUpdated *time.Time `json:"date_updated,omitempty"` + Jenis *string `json:"jenis,omitempty"` + Pelayanan *string `json:"pelayanan,omitempty"` + Dinas *string `json:"dinas,omitempty"` + KelompokObyek *string `json:"kelompok_obyek,omitempty"` + KodeTarif *string `json:"kode_tarif,omitempty"` + Tarif *string `json:"tarif,omitempty"` + Satuan *string `json:"satuan,omitempty"` + TarifOvertime *string `json:"tarif_overtime,omitempty"` + SatuanOvertime *string 
`json:"satuan_overtime,omitempty"` + RekeningPokok *string `json:"rekening_pokok,omitempty"` + RekeningDenda *string `json:"rekening_denda,omitempty"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + *Alias + }{ + Alias: (*Alias)(&r), + } + + // Convert NullableInt32 to pointer + if r.Sort.Valid { + sort := int(r.Sort.Int32) + aux.Sort = &sort + } + if r.UserCreated.Valid { + aux.UserCreated = &r.UserCreated.String + } + if r.DateCreated.Valid { + aux.DateCreated = &r.DateCreated.Time + } + if r.UserUpdated.Valid { + aux.UserUpdated = &r.UserUpdated.String + } + if r.DateUpdated.Valid { + aux.DateUpdated = &r.DateUpdated.Time + } + if r.Jenis.Valid { + aux.Jenis = &r.Jenis.String + } + if r.Pelayanan.Valid { + aux.Pelayanan = &r.Pelayanan.String + } + if r.Dinas.Valid { + aux.Dinas = &r.Dinas.String + } + if r.KelompokObyek.Valid { + aux.KelompokObyek = &r.KelompokObyek.String + } + if r.KodeTarif.Valid { + aux.KodeTarif = &r.KodeTarif.String + } + if r.Tarif.Valid { + aux.Tarif = &r.Tarif.String + } + if r.Satuan.Valid { + aux.Satuan = &r.Satuan.String + } + if r.TarifOvertime.Valid { + aux.TarifOvertime = &r.TarifOvertime.String + } + if r.SatuanOvertime.Valid { + aux.SatuanOvertime = &r.SatuanOvertime.String + } + if r.RekeningPokok.Valid { + aux.RekeningPokok = &r.RekeningPokok.String + } + if r.RekeningDenda.Valid { + aux.RekeningDenda = &r.RekeningDenda.String + } + if r.Uraian1.Valid { + aux.Uraian1 = &r.Uraian1.String + } + if r.Uraian2.Valid { + aux.Uraian2 = &r.Uraian2.String + } + if r.Uraian3.Valid { + aux.Uraian3 = &r.Uraian3.String + } + + return json.Marshal(aux) +} + +// Helper methods untuk mendapatkan nilai yang aman +func (r *Retribusi) GetJenis() string { + if r.Jenis.Valid { + return r.Jenis.String + } + return "" +} + +func (r *Retribusi) GetDinas() string { + if r.Dinas.Valid { + return r.Dinas.String + } + return "" +} + +func (r *Retribusi) GetTarif() 
string { + if r.Tarif.Valid { + return r.Tarif.String + } + return "" +} + +// Response struct untuk GET by ID - diperbaiki struktur +type RetribusiGetByIDResponse struct { + Message string `json:"message"` + Data *Retribusi `json:"data"` +} + +// Request struct untuk create - dioptimalkan dengan validasi +type RetribusiCreateRequest struct { + Status string `json:"status" validate:"required,oneof=draft active inactive"` + Jenis *string `json:"jenis,omitempty" validate:"omitempty,min=1,max=255"` + Pelayanan *string `json:"pelayanan,omitempty" validate:"omitempty,min=1,max=255"` + Dinas *string `json:"dinas,omitempty" validate:"omitempty,min=1,max=255"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" validate:"omitempty,min=1,max=255"` + KodeTarif *string `json:"kode_tarif,omitempty" validate:"omitempty,min=1,max=255"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + Tarif *string `json:"tarif,omitempty" validate:"omitempty,numeric"` + Satuan *string `json:"satuan,omitempty" validate:"omitempty,min=1,max=255"` + TarifOvertime *string `json:"tarif_overtime,omitempty" validate:"omitempty,numeric"` + SatuanOvertime *string `json:"satuan_overtime,omitempty" validate:"omitempty,min=1,max=255"` + RekeningPokok *string `json:"rekening_pokok,omitempty" validate:"omitempty,min=1,max=255"` + RekeningDenda *string `json:"rekening_denda,omitempty" validate:"omitempty,min=1,max=255"` +} + +// Response struct untuk create +type RetribusiCreateResponse struct { + Message string `json:"message"` + Data *Retribusi `json:"data"` +} + +// Update request - sama seperti create tapi dengan ID +type RetribusiUpdateRequest struct { + ID string `json:"-" validate:"required,uuid4"` // ID dari URL path + Status string `json:"status" validate:"required,oneof=draft active inactive"` + Jenis *string `json:"jenis,omitempty" validate:"omitempty,min=1,max=255"` + Pelayanan *string 
`json:"pelayanan,omitempty" validate:"omitempty,min=1,max=255"` + Dinas *string `json:"dinas,omitempty" validate:"omitempty,min=1,max=255"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" validate:"omitempty,min=1,max=255"` + KodeTarif *string `json:"kode_tarif,omitempty" validate:"omitempty,min=1,max=255"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + Tarif *string `json:"tarif,omitempty" validate:"omitempty,numeric"` + Satuan *string `json:"satuan,omitempty" validate:"omitempty,min=1,max=255"` + TarifOvertime *string `json:"tarif_overtime,omitempty" validate:"omitempty,numeric"` + SatuanOvertime *string `json:"satuan_overtime,omitempty" validate:"omitempty,min=1,max=255"` + RekeningPokok *string `json:"rekening_pokok,omitempty" validate:"omitempty,min=1,max=255"` + RekeningDenda *string `json:"rekening_denda,omitempty" validate:"omitempty,min=1,max=255"` +} + +// Response struct untuk update +type RetribusiUpdateResponse struct { + Message string `json:"message"` + Data *Retribusi `json:"data"` +} + +// Response struct untuk delete +type RetribusiDeleteResponse struct { + Message string `json:"message"` + ID string `json:"id"` +} + +// Enhanced GET response dengan pagination dan aggregation +type RetribusiGetResponse struct { + Message string `json:"message"` + Data []Retribusi `json:"data"` + Meta models.MetaResponse `json:"meta"` + Summary *models.AggregateData `json:"summary,omitempty"` +} + +// Filter struct untuk query parameters +type RetribusiFilter struct { + Status *string `json:"status,omitempty" form:"status"` + Jenis *string `json:"jenis,omitempty" form:"jenis"` + Dinas *string `json:"dinas,omitempty" form:"dinas"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" form:"kelompok_obyek"` + Search *string `json:"search,omitempty" form:"search"` + DateFrom *time.Time `json:"date_from,omitempty" form:"date_from"` + DateTo *time.Time 
`json:"date_to,omitempty" form:"date_to"` +} diff --git a/internal/models/validation.go b/internal/models/validation.go new file mode 100644 index 0000000..1462d35 --- /dev/null +++ b/internal/models/validation.go @@ -0,0 +1,106 @@ +package models + +import ( + "regexp" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) + +// CustomValidator wraps the validator +type CustomValidator struct { + Validator *validator.Validate +} + +// Validate validates struct +func (cv *CustomValidator) Validate(i interface{}) error { + return cv.Validator.Struct(i) +} + +// RegisterCustomValidations registers custom validation rules +func RegisterCustomValidations(v *validator.Validate) { + // Validate Indonesian phone number + v.RegisterValidation("indonesian_phone", validateIndonesianPhone) + + // Validate BPJS card number format + v.RegisterValidation("bpjs_card", validateBPJSCard) + + // Validate Indonesian NIK + v.RegisterValidation("indonesian_nik", validateIndonesianNIK) + + // Validate date format YYYY-MM-DD + v.RegisterValidation("date_format", validateDateFormat) + + // Validate ICD-10 code format + v.RegisterValidation("icd10", validateICD10) + + // Validate ICD-9-CM procedure code + v.RegisterValidation("icd9cm", validateICD9CM) +} + +func validateIndonesianPhone(fl validator.FieldLevel) bool { + phone := fl.Field().String() + if phone == "" { + return true // Optional field + } + + // Indonesian phone number pattern: +62, 62, 08, or 8 + pattern := `^(\+?62|0?8)[1-9][0-9]{7,11}$` + matched, _ := regexp.MatchString(pattern, phone) + return matched +} + +func validateBPJSCard(fl validator.FieldLevel) bool { + card := fl.Field().String() + if len(card) != 13 { + return false + } + + // BPJS card should be numeric + pattern := `^\d{13}$` + matched, _ := regexp.MatchString(pattern, card) + return matched +} + +func validateIndonesianNIK(fl validator.FieldLevel) bool { + nik := fl.Field().String() + if len(nik) != 16 { + return false + } + + // NIK should be 
numeric + pattern := `^\d{16}$` + matched, _ := regexp.MatchString(pattern, nik) + return matched +} + +func validateDateFormat(fl validator.FieldLevel) bool { + dateStr := fl.Field().String() + _, err := time.Parse("2006-01-02", dateStr) + return err == nil +} + +func validateICD10(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-10 pattern: Letter followed by 2 digits, optional dot and more digits + pattern := `^[A-Z]\d{2}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, strings.ToUpper(code)) + return matched +} + +func validateICD9CM(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-9-CM procedure pattern: 2-4 digits with optional decimal + pattern := `^\d{2,4}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, code) + return matched +} diff --git a/internal/routes/v1/routes.go b/internal/routes/v1/routes.go new file mode 100644 index 0000000..1e4632d --- /dev/null +++ b/internal/routes/v1/routes.go @@ -0,0 +1,151 @@ +package v1 + +import ( + "api-service/internal/config" + "api-service/internal/database" + authHandlers "api-service/internal/handlers/auth" + healthcheckHandlers "api-service/internal/handlers/healthcheck" + retribusiHandlers "api-service/internal/handlers/retribusi" + "api-service/internal/middleware" + services "api-service/internal/services/auth" + "api-service/pkg/logger" + "time" + + "github.com/gin-gonic/gin" + swaggerFiles "github.com/swaggo/files" + ginSwagger "github.com/swaggo/gin-swagger" +) + +func RegisterRoutes(cfg *config.Config) *gin.Engine { + router := gin.New() + + // Initialize auth middleware configuration + middleware.InitializeAuth(cfg) + + // Add global middleware + router.Use(middleware.CORSConfig()) + router.Use(middleware.ErrorHandler()) + router.Use(logger.RequestLoggerMiddleware(logger.Default())) + router.Use(gin.Recovery()) + + // Initialize services with error handling + 
authService := services.NewAuthService(cfg) + if authService == nil { + logger.Fatal("Failed to initialize auth service") + } + + // Initialize database service + dbService := database.New(cfg) + + // ============================================================================= + // HEALTH CHECK & SYSTEM ROUTES + // ============================================================================= + + healthCheckHandler := healthcheckHandlers.NewHealthCheckHandler(dbService) + sistem := router.Group("/api/sistem") + { + sistem.GET("/health", healthCheckHandler.CheckHealth) + sistem.GET("/databases", func(c *gin.Context) { + c.JSON(200, gin.H{ + "databases": dbService.ListDBs(), + "health": dbService.Health(), + "timestamp": time.Now().Unix(), + }) + }) + sistem.GET("/info", func(c *gin.Context) { + c.JSON(200, gin.H{ + "service": "API Service v1.0.0", + "websocket_active": true, + "databases": dbService.ListDBs(), + "timestamp": time.Now().Unix(), + }) + }) + } + + // ============================================================================= + // SWAGGER DOCUMENTATION + // ============================================================================= + + router.GET("/swagger/*any", ginSwagger.WrapHandler( + swaggerFiles.Handler, + ginSwagger.DefaultModelsExpandDepth(-1), + ginSwagger.DeepLinking(true), + )) + + // ============================================================================= + // API v1 GROUP + // ============================================================================= + + v1 := router.Group("/api/v1") + + // ============================================================================= + // PUBLIC ROUTES (No Authentication Required) + // ============================================================================= + + // Authentication routes + authHandler := authHandlers.NewAuthHandler(authService) + tokenHandler := authHandlers.NewTokenHandler(authService) + + // Basic auth routes + v1.POST("/auth/login", authHandler.Login) + 
v1.POST("/auth/register", authHandler.Register) + v1.POST("/auth/refresh", authHandler.RefreshToken) + + // Token generation routes + v1.POST("/token/generate", tokenHandler.GenerateToken) + v1.POST("/token/generate-direct", tokenHandler.GenerateTokenDirect) + + // ============================================================================= + // PUBLISHED ROUTES + // ============================================================================= + + // Retribusi endpoints with + retribusiHandler := retribusiHandlers.NewRetribusiHandler() + retribusiGroup := v1.Group("/retribusi") + { + retribusiGroup.GET("", retribusiHandler.GetRetribusi) + retribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) + retribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) + retribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) + retribusiGroup.POST("", func(c *gin.Context) { + retribusiHandler.CreateRetribusi(c) + }) + + retribusiGroup.PUT("/id/:id", func(c *gin.Context) { + retribusiHandler.UpdateRetribusi(c) + }) + + retribusiGroup.DELETE("/id/:id", func(c *gin.Context) { + retribusiHandler.DeleteRetribusi(c) + }) + } + + // ============================================================================= + // PROTECTED ROUTES (Authentication Required) + // ============================================================================= + + protected := v1.Group("/") + protected.Use(middleware.ConfigurableAuthMiddleware(cfg)) + + // Protected retribusi endpoints (Authentication Required) + protectedRetribusiGroup := protected.Group("/retribusi") + { + protectedRetribusiGroup.GET("", retribusiHandler.GetRetribusi) + protectedRetribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) + protectedRetribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) + protectedRetribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) + protectedRetribusiGroup.POST("", func(c *gin.Context) { + retribusiHandler.CreateRetribusi(c) + }) + + 
protectedRetribusiGroup.PUT("/id/:id", func(c *gin.Context) { + retribusiHandler.UpdateRetribusi(c) + }) + + protectedRetribusiGroup.DELETE("/id/:id", func(c *gin.Context) { + retribusiHandler.DeleteRetribusi(c) + }) + } + + return router +} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 0000000..98ef90c --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,53 @@ +package server + +import ( + "fmt" + "net/http" + "os" + "strconv" + "time" + + _ "github.com/joho/godotenv/autoload" + + "api-service/internal/config" + "api-service/internal/database" + v1 "api-service/internal/routes/v1" +) + +var dbService database.Service // Global variable to hold the database service instance + +type Server struct { + port int + db database.Service +} + +func NewServer() *http.Server { + // Load configuration + cfg := config.LoadConfig() + cfg.Validate() + + port, _ := strconv.Atoi(os.Getenv("PORT")) + if port == 0 { + port = cfg.Server.Port + } + + if dbService == nil { // Check if the database service is already initialized + dbService = database.New(cfg) // Initialize only once + } + + NewServer := &Server{ + port: port, + db: dbService, // Use the global database service instance + } + + // Declare Server config + server := &http.Server{ + Addr: fmt.Sprintf(":%d", NewServer.port), + Handler: v1.RegisterRoutes(cfg), + IdleTimeout: time.Minute, + ReadTimeout: 10 * time.Second, + WriteTimeout: 30 * time.Second, + } + + return server +} diff --git a/internal/services/auth/auth.go b/internal/services/auth/auth.go new file mode 100644 index 0000000..d76aadb --- /dev/null +++ b/internal/services/auth/auth.go @@ -0,0 +1,169 @@ +package services + +import ( + "api-service/internal/config" + models "api-service/internal/models/auth" + "errors" + "time" + + "github.com/golang-jwt/jwt/v5" + "golang.org/x/crypto/bcrypt" +) + +// AuthService handles authentication logic +type AuthService struct { + config *config.Config + users 
map[string]*models.User // In-memory user store for demo +} + +// NewAuthService creates a new authentication service +func NewAuthService(cfg *config.Config) *AuthService { + // Initialize with demo users + users := make(map[string]*models.User) + + // Add demo users + users["admin"] = &models.User{ + ID: "1", + Username: "admin", + Email: "admin@example.com", + Password: "$2a$10$92IXUNpkjO0rOQ5byMi.Ye4oKoEa3Ro9llC/.og/at2.uheWG/igi", // password + Role: "admin", + } + + users["user"] = &models.User{ + ID: "2", + Username: "user", + Email: "user@example.com", + Password: "$2a$10$92IXUNpkjO0rOQ5byMi.Ye4oKoEa3Ro9llC/.og/at2.uheWG/igi", // password + Role: "user", + } + + return &AuthService{ + config: cfg, + users: users, + } +} + +// Login authenticates user and generates JWT token +func (s *AuthService) Login(username, password string) (*models.TokenResponse, error) { + user, exists := s.users[username] + if !exists { + return nil, errors.New("invalid credentials") + } + + // Verify password + err := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(password)) + if err != nil { + return nil, errors.New("invalid credentials") + } + + // Generate JWT token + token, err := s.generateToken(user) + if err != nil { + return nil, err + } + + return &models.TokenResponse{ + AccessToken: token, + TokenType: "Bearer", + ExpiresIn: 3600, // 1 hour + }, nil +} + +// generateToken creates a new JWT token for the user +func (s *AuthService) generateToken(user *models.User) (string, error) { + // Create claims + claims := jwt.MapClaims{ + "user_id": user.ID, + "username": user.Username, + "email": user.Email, + "role": user.Role, + "exp": time.Now().Add(time.Hour * 1).Unix(), + "iat": time.Now().Unix(), + } + + // Create token + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + // Sign token with secret key + secretKey := []byte(s.getJWTSecret()) + return token.SignedString(secretKey) +} + +// GenerateTokenForUser generates a JWT token for a specific user 
+func (s *AuthService) GenerateTokenForUser(user *models.User) (string, error) { + // Create claims + claims := jwt.MapClaims{ + "user_id": user.ID, + "username": user.Username, + "email": user.Email, + "role": user.Role, + "exp": time.Now().Add(time.Hour * 1).Unix(), + "iat": time.Now().Unix(), + } + + // Create token + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + // Sign token with secret key + secretKey := []byte(s.getJWTSecret()) + return token.SignedString(secretKey) +} + +// ValidateToken validates the JWT token +func (s *AuthService) ValidateToken(tokenString string) (*models.JWTClaims, error) { + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, errors.New("unexpected signing method") + } + return []byte(s.getJWTSecret()), nil + }) + + if err != nil { + return nil, err + } + + if !token.Valid { + return nil, errors.New("invalid token") + } + + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, errors.New("invalid claims") + } + + return &models.JWTClaims{ + UserID: claims["user_id"].(string), + Username: claims["username"].(string), + Email: claims["email"].(string), + Role: claims["role"].(string), + }, nil +} + +// getJWTSecret returns the JWT secret key +func (s *AuthService) getJWTSecret() string { + // In production, this should come from environment variables + return "your-secret-key-change-this-in-production" +} + +// RegisterUser registers a new user (for demo purposes) +func (s *AuthService) RegisterUser(username, email, password, role string) error { + if _, exists := s.users[username]; exists { + return errors.New("username already exists") + } + + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + if err != nil { + return err + } + + s.users[username] = &models.User{ + ID: string(rune(len(s.users) + 1)), + Username: username, + Email: email, + Password: 
// dynamic_filter.go (package utils)
//
// Builds parameterized PostgreSQL queries from client-supplied
// filter/sort/pagination descriptions. All comparison values are bound as
// $n placeholders; identifiers are double-quoted and may be restricted via
// an allow-list. A QueryBuilder mutates internal state (paramCounter)
// while building, so one instance must NOT be shared across goroutines.

// FilterOperator represents supported filter operators.
type FilterOperator string

const (
	OpEqual            FilterOperator = "_eq"
	OpNotEqual         FilterOperator = "_neq"
	OpLike             FilterOperator = "_like"
	OpILike            FilterOperator = "_ilike"
	OpIn               FilterOperator = "_in"
	OpNotIn            FilterOperator = "_nin"
	OpGreaterThan      FilterOperator = "_gt"
	OpGreaterThanEqual FilterOperator = "_gte"
	OpLessThan         FilterOperator = "_lt"
	OpLessThanEqual    FilterOperator = "_lte"
	OpBetween          FilterOperator = "_between"
	OpNotBetween       FilterOperator = "_nbetween"
	OpNull             FilterOperator = "_null"
	OpNotNull          FilterOperator = "_nnull"
	OpContains         FilterOperator = "_contains"
	OpNotContains      FilterOperator = "_ncontains"
	OpStartsWith       FilterOperator = "_starts_with"
	OpEndsWith         FilterOperator = "_ends_with"
)

// DynamicFilter represents a single filter condition.
type DynamicFilter struct {
	Column   string         `json:"column"`
	Operator FilterOperator `json:"operator"`
	Value    interface{}    `json:"value"`
	LogicOp  string         `json:"logic_op,omitempty"` // AND, OR (whitelisted before use)
}

// FilterGroup represents a group of filters combined into one WHERE fragment.
type FilterGroup struct {
	Filters []DynamicFilter `json:"filters"`
	LogicOp string          `json:"logic_op"` // AND, OR (whitelisted before use)
}

// DynamicQuery represents the complete query structure.
type DynamicQuery struct {
	Fields  []string      `json:"fields,omitempty"`
	Filters []FilterGroup `json:"filters,omitempty"`
	Sort    []SortField   `json:"sort,omitempty"`
	Limit   int           `json:"limit"`
	Offset  int           `json:"offset"`
	GroupBy []string      `json:"group_by,omitempty"`
	Having  []FilterGroup `json:"having,omitempty"`
}

// SortField represents sorting configuration.
type SortField struct {
	Column string `json:"column"`
	Order  string `json:"order"` // ASC or DESC; anything else is coerced to ASC
}

// QueryBuilder builds SQL queries from dynamic filters.
// (The previous, never-initialized `mu *sync.RWMutex` field was removed:
// it implied a thread-safety the type does not have.)
type QueryBuilder struct {
	tableName      string
	columnMapping  map[string]string // API field name -> DB column name
	allowedColumns map[string]bool   // allow-list; empty means "allow all"
	paramCounter   int               // index of the next $n placeholder
}

// NewQueryBuilder creates a new query builder for the given table.
// tableName is interpolated verbatim into SQL and must be a trusted,
// server-side constant — never client input.
func NewQueryBuilder(tableName string) *QueryBuilder {
	return &QueryBuilder{
		tableName:      tableName,
		columnMapping:  make(map[string]string),
		allowedColumns: make(map[string]bool),
	}
}

// SetColumnMapping sets the mapping between API field names and database
// column names (fluent).
func (qb *QueryBuilder) SetColumnMapping(mapping map[string]string) *QueryBuilder {
	qb.columnMapping = mapping
	return qb
}

// SetAllowedColumns restricts filtering/sorting/grouping/selection to the
// given API field names (fluent).
func (qb *QueryBuilder) SetAllowedColumns(columns []string) *QueryBuilder {
	qb.allowedColumns = make(map[string]bool, len(columns))
	for _, col := range columns {
		qb.allowedColumns[col] = true
	}
	return qb
}

// sanitizeLogicOp whitelists boolean connectives. Anything other than
// OR/or becomes AND — previously arbitrary client strings were pasted
// into the SQL text (injection vector).
func sanitizeLogicOp(op string) string {
	if strings.EqualFold(op, "OR") {
		return "OR"
	}
	return "AND"
}

// sanitizeSortOrder whitelists sort directions. Anything other than
// DESC/desc becomes ASC — previously arbitrary client strings were pasted
// into the SQL text (injection vector).
func sanitizeSortOrder(order string) string {
	if strings.EqualFold(order, "DESC") {
		return "DESC"
	}
	return "ASC"
}

// BuildQuery builds the complete SQL statement and its bound arguments.
func (qb *QueryBuilder) BuildQuery(query DynamicQuery) (string, []interface{}, error) {
	qb.paramCounter = 0

	selectClause := qb.buildSelectClause(query.Fields)
	fromClause := fmt.Sprintf("FROM %s", qb.tableName)

	whereClause, whereArgs, err := qb.buildWhereClause(query.Filters)
	if err != nil {
		return "", nil, err
	}

	orderClause := qb.buildOrderClause(query.Sort)
	groupClause := qb.buildGroupByClause(query.GroupBy)

	havingClause, havingArgs, err := qb.buildHavingClause(query.Having)
	if err != nil {
		return "", nil, err
	}

	sqlParts := []string{selectClause, fromClause}
	args := []interface{}{}

	if whereClause != "" {
		sqlParts = append(sqlParts, "WHERE "+whereClause)
		args = append(args, whereArgs...)
	}
	if groupClause != "" {
		sqlParts = append(sqlParts, groupClause)
	}
	if havingClause != "" {
		sqlParts = append(sqlParts, "HAVING "+havingClause)
		args = append(args, havingArgs...)
	}
	if orderClause != "" {
		sqlParts = append(sqlParts, orderClause)
	}

	// Pagination: LIMIT/OFFSET are bound parameters, only emitted when set.
	if query.Limit > 0 {
		qb.paramCounter++
		sqlParts = append(sqlParts, fmt.Sprintf("LIMIT $%d", qb.paramCounter))
		args = append(args, query.Limit)
	}
	if query.Offset > 0 {
		qb.paramCounter++
		sqlParts = append(sqlParts, fmt.Sprintf("OFFSET $%d", qb.paramCounter))
		args = append(args, query.Offset)
	}

	return strings.Join(sqlParts, " "), args, nil
}

// buildSelectClause builds the SELECT list.
// WARNING(review): fields containing spaces or parentheses are treated as
// SQL expressions and passed through VERBATIM, bypassing the allow-list —
// do not feed untrusted input here unless that passthrough is removed.
func (qb *QueryBuilder) buildSelectClause(fields []string) string {
	if len(fields) == 0 || (len(fields) == 1 && fields[0] == "*") {
		return "SELECT *"
	}

	var selected []string
	for _, field := range fields {
		if field == "*.*" || field == "*" {
			selected = append(selected, "*")
			continue
		}

		// Expression passthrough (see WARNING above).
		if strings.ContainsAny(field, " ()") {
			selected = append(selected, field)
			continue
		}

		// Allow-list check against the API-visible (pre-mapping) name.
		if len(qb.allowedColumns) > 0 && !qb.allowedColumns[field] {
			continue
		}
		if mapped, ok := qb.columnMapping[field]; ok {
			field = mapped
		}
		selected = append(selected, fmt.Sprintf(`"%s"`, field))
	}

	if len(selected) == 0 {
		return "SELECT *"
	}
	return "SELECT " + strings.Join(selected, ", ")
}

// buildWhereClause joins filter groups with validated AND/OR connectives.
func (qb *QueryBuilder) buildWhereClause(filterGroups []FilterGroup) (string, []interface{}, error) {
	if len(filterGroups) == 0 {
		return "", nil, nil
	}

	var conditions []string
	var args []interface{}

	for i, group := range filterGroups {
		groupCondition, groupArgs, err := qb.buildFilterGroup(group)
		if err != nil {
			return "", nil, err
		}
		if groupCondition == "" {
			continue
		}
		if i > 0 {
			conditions = append(conditions, sanitizeLogicOp(group.LogicOp))
		}
		conditions = append(conditions, groupCondition)
		args = append(args, groupArgs...)
	}

	return strings.Join(conditions, " "), args, nil
}

// buildFilterGroup joins one group's conditions; a filter's own LogicOp
// takes precedence over the group's, both validated.
func (qb *QueryBuilder) buildFilterGroup(group FilterGroup) (string, []interface{}, error) {
	if len(group.Filters) == 0 {
		return "", nil, nil
	}

	var conditions []string
	var args []interface{}

	for i, filter := range group.Filters {
		condition, filterArgs, err := qb.buildFilterCondition(filter)
		if err != nil {
			return "", nil, err
		}
		if condition == "" {
			continue
		}
		if i > 0 {
			op := filter.LogicOp
			if op == "" {
				op = group.LogicOp
			}
			conditions = append(conditions, sanitizeLogicOp(op))
		}
		conditions = append(conditions, condition)
		args = append(args, filterArgs...)
	}

	return strings.Join(conditions, " "), args, nil
}

// filterComparators maps the simple one-value operators to their SQL form.
var filterComparators = map[FilterOperator]string{
	OpEqual:            "=",
	OpNotEqual:         "!=",
	OpLike:             "LIKE",
	OpILike:            "ILIKE",
	OpGreaterThan:      ">",
	OpGreaterThanEqual: ">=",
	OpLessThan:         "<",
	OpLessThanEqual:    "<=",
}

// filterPatterns maps substring operators to an (SQL comparator, wildcard
// template) pair; the wildcards wrap the bound value, never the SQL text.
var filterPatterns = map[FilterOperator]struct {
	sqlOp    string
	template string
}{
	OpContains:    {"ILIKE", "%%%v%%"},
	OpNotContains: {"NOT ILIKE", "%%%v%%"},
	OpStartsWith:  {"ILIKE", "%v%%"},
	OpEndsWith:    {"ILIKE", "%%%v"},
}

// buildFilterCondition builds a single parameterized condition. Filters on
// disallowed columns or with nil values are silently dropped (original
// behavior); unknown operators are an error.
func (qb *QueryBuilder) buildFilterCondition(filter DynamicFilter) (string, []interface{}, error) {
	// Allow-list check against the API-visible (pre-mapping) name.
	if len(qb.allowedColumns) > 0 && !qb.allowedColumns[filter.Column] {
		return "", nil, nil
	}

	column := filter.Column
	if mapped, ok := qb.columnMapping[column]; ok {
		column = mapped
	}
	column = fmt.Sprintf(`"%s"`, column) // quote identifier for PostgreSQL

	if sqlOp, ok := filterComparators[filter.Operator]; ok {
		if filter.Value == nil {
			return "", nil, nil
		}
		qb.paramCounter++
		return fmt.Sprintf("%s %s $%d", column, sqlOp, qb.paramCounter), []interface{}{filter.Value}, nil
	}

	if p, ok := filterPatterns[filter.Operator]; ok {
		if filter.Value == nil {
			return "", nil, nil
		}
		qb.paramCounter++
		return fmt.Sprintf("%s %s $%d", column, p.sqlOp, qb.paramCounter),
			[]interface{}{fmt.Sprintf(p.template, filter.Value)}, nil
	}

	switch filter.Operator {
	case OpIn, OpNotIn:
		values := qb.parseArrayValue(filter.Value)
		if len(values) == 0 {
			return "", nil, nil
		}
		placeholders := make([]string, len(values))
		args := make([]interface{}, len(values))
		for i, v := range values {
			qb.paramCounter++
			placeholders[i] = fmt.Sprintf("$%d", qb.paramCounter)
			args[i] = v
		}
		keyword := "IN"
		if filter.Operator == OpNotIn {
			keyword = "NOT IN"
		}
		return fmt.Sprintf("%s %s (%s)", column, keyword, strings.Join(placeholders, ", ")), args, nil

	case OpBetween:
		if filter.Value == nil {
			return "", nil, nil
		}
		values := qb.parseArrayValue(filter.Value)
		if len(values) != 2 {
			return "", nil, fmt.Errorf("between operator requires exactly 2 values")
		}
		qb.paramCounter++
		lo := qb.paramCounter
		qb.paramCounter++
		hi := qb.paramCounter
		return fmt.Sprintf("%s BETWEEN $%d AND $%d", column, lo, hi), []interface{}{values[0], values[1]}, nil

	case OpNotBetween:
		if filter.Value == nil {
			return "", nil, nil
		}
		values := qb.parseArrayValue(filter.Value)
		if len(values) != 2 {
			return "", nil, fmt.Errorf("not between operator requires exactly 2 values")
		}
		qb.paramCounter++
		lo := qb.paramCounter
		qb.paramCounter++
		hi := qb.paramCounter
		return fmt.Sprintf("%s NOT BETWEEN $%d AND $%d", column, lo, hi), []interface{}{values[0], values[1]}, nil

	case OpNull:
		return fmt.Sprintf("%s IS NULL", column), nil, nil

	case OpNotNull:
		return fmt.Sprintf("%s IS NOT NULL", column), nil, nil

	default:
		return "", nil, fmt.Errorf("unsupported operator: %s", filter.Operator)
	}
}

// parseArrayValue normalizes a value into a flat []interface{}: slices are
// expanded, comma-separated strings are split and trimmed, everything else
// becomes a one-element list.
func (qb *QueryBuilder) parseArrayValue(value interface{}) []interface{} {
	if value == nil {
		return nil
	}

	if reflect.TypeOf(value).Kind() == reflect.Slice {
		v := reflect.ValueOf(value)
		result := make([]interface{}, v.Len())
		for i := 0; i < v.Len(); i++ {
			result[i] = v.Index(i).Interface()
		}
		return result
	}

	if str, ok := value.(string); ok {
		if strings.Contains(str, ",") {
			parts := strings.Split(str, ",")
			result := make([]interface{}, len(parts))
			for i, part := range parts {
				result[i] = strings.TrimSpace(part)
			}
			return result
		}
		return []interface{}{str}
	}

	return []interface{}{value}
}

// buildOrderClause builds the ORDER BY clause; directions are whitelisted
// to ASC/DESC (previously raw client text was concatenated into the SQL).
func (qb *QueryBuilder) buildOrderClause(sortFields []SortField) string {
	var orderParts []string
	for _, sort := range sortFields {
		column := sort.Column

		// Allow-list check against the API-visible (pre-mapping) name.
		if len(qb.allowedColumns) > 0 && !qb.allowedColumns[column] {
			continue
		}
		if mapped, ok := qb.columnMapping[column]; ok {
			column = mapped
		}

		orderParts = append(orderParts, fmt.Sprintf(`"%s" %s`, column, sanitizeSortOrder(sort.Order)))
	}

	if len(orderParts) == 0 {
		return ""
	}
	return "ORDER BY " + strings.Join(orderParts, ", ")
}

// buildGroupByClause builds the GROUP BY clause.
// CONSISTENCY FIX: the allow-list is now checked against the API-visible
// name BEFORE mapping, matching every other builder (the original checked
// the already-mapped DB name here).
func (qb *QueryBuilder) buildGroupByClause(groupFields []string) string {
	var groupParts []string
	for _, field := range groupFields {
		if len(qb.allowedColumns) > 0 && !qb.allowedColumns[field] {
			continue
		}
		column := field
		if mapped, ok := qb.columnMapping[column]; ok {
			column = mapped
		}
		groupParts = append(groupParts, fmt.Sprintf(`"%s"`, column))
	}

	if len(groupParts) == 0 {
		return ""
	}
	return "GROUP BY " + strings.Join(groupParts, ", ")
}

// buildHavingClause builds the HAVING clause; it shares the WHERE-building
// machinery since the grammar of the condition list is identical.
func (qb *QueryBuilder) buildHavingClause(havingGroups []FilterGroup) (string, []interface{}, error) {
	if len(havingGroups) == 0 {
		return "", nil, nil
	}
	return qb.buildWhereClause(havingGroups)
}

// BuildCountQuery builds the matching SELECT COUNT(*) statement (no
// ORDER BY / LIMIT / OFFSET) for pagination totals.
func (qb *QueryBuilder) BuildCountQuery(query DynamicQuery) (string, []interface{}, error) {
	qb.paramCounter = 0

	fromClause := fmt.Sprintf("FROM %s", qb.tableName)

	whereClause, whereArgs, err := qb.buildWhereClause(query.Filters)
	if err != nil {
		return "", nil, err
	}
	groupClause := qb.buildGroupByClause(query.GroupBy)
	havingClause, havingArgs, err := qb.buildHavingClause(query.Having)
	if err != nil {
		return "", nil, err
	}

	sqlParts := []string{"SELECT COUNT(*)", fromClause}
	args := []interface{}{}

	if whereClause != "" {
		sqlParts = append(sqlParts, "WHERE "+whereClause)
		args = append(args, whereArgs...)
	}
	if groupClause != "" {
		sqlParts = append(sqlParts, groupClause)
	}
	if havingClause != "" {
		sqlParts = append(sqlParts, "HAVING "+havingClause)
		args = append(args, havingArgs...)
	}

	return strings.Join(sqlParts, " "), args, nil
}
+ } + + sql := strings.Join(sqlParts, " ") + return sql, args, nil +} diff --git a/internal/utils/filters/query_parser.go b/internal/utils/filters/query_parser.go new file mode 100644 index 0000000..6b6f07e --- /dev/null +++ b/internal/utils/filters/query_parser.go @@ -0,0 +1,241 @@ +package utils + +import ( + "net/url" + "strconv" + "strings" + "time" +) + +// QueryParser parses HTTP query parameters into DynamicQuery +type QueryParser struct { + defaultLimit int + maxLimit int +} + +// NewQueryParser creates a new query parser +func NewQueryParser() *QueryParser { + return &QueryParser{ + defaultLimit: 10, + maxLimit: 100, + } +} + +// SetLimits sets default and maximum limits +func (qp *QueryParser) SetLimits(defaultLimit, maxLimit int) *QueryParser { + qp.defaultLimit = defaultLimit + qp.maxLimit = maxLimit + return qp +} + +// ParseQuery parses URL query parameters into DynamicQuery +func (qp *QueryParser) ParseQuery(values url.Values) (DynamicQuery, error) { + query := DynamicQuery{ + Limit: qp.defaultLimit, + Offset: 0, + } + + // Parse fields + if fields := values.Get("fields"); fields != "" { + if fields == "*.*" || fields == "*" { + query.Fields = []string{"*"} + } else { + query.Fields = strings.Split(fields, ",") + for i, field := range query.Fields { + query.Fields[i] = strings.TrimSpace(field) + } + } + } + + // Parse pagination + if limit := values.Get("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil { + if l > 0 && l <= qp.maxLimit { + query.Limit = l + } + } + } + + if offset := values.Get("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Parse filters + filters, err := qp.parseFilters(values) + if err != nil { + return query, err + } + query.Filters = filters + + // Parse sorting + sorts, err := qp.parseSorting(values) + if err != nil { + return query, err + } + query.Sort = sorts + + // Parse group by + if groupBy := values.Get("group"); groupBy != "" { + 
query.GroupBy = strings.Split(groupBy, ",") + for i, field := range query.GroupBy { + query.GroupBy[i] = strings.TrimSpace(field) + } + } + + return query, nil +} + +// parseFilters parses filter parameters +// Supports format: filter[column][operator]=value +func (qp *QueryParser) parseFilters(values url.Values) ([]FilterGroup, error) { + filterMap := make(map[string]map[string]string) + + // Group filters by column + for key, vals := range values { + if strings.HasPrefix(key, "filter[") && strings.HasSuffix(key, "]") { + // Parse filter[column][operator] format + parts := strings.Split(key[7:len(key)-1], "][") + if len(parts) == 2 { + column := parts[0] + operator := parts[1] + + if filterMap[column] == nil { + filterMap[column] = make(map[string]string) + } + + if len(vals) > 0 { + filterMap[column][operator] = vals[0] + } + } + } + } + + if len(filterMap) == 0 { + return nil, nil + } + + // Convert to FilterGroup + var filters []DynamicFilter + + for column, operators := range filterMap { + for opStr, value := range operators { + operator := FilterOperator(opStr) + + // Parse value based on operator + var parsedValue interface{} + switch operator { + case OpIn, OpNotIn: + if value != "" { + parsedValue = strings.Split(value, ",") + } + case OpBetween, OpNotBetween: + if value != "" { + parts := strings.Split(value, ",") + if len(parts) == 2 { + parsedValue = []interface{}{strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])} + } + } + case OpNull, OpNotNull: + parsedValue = nil + default: + parsedValue = value + } + + filters = append(filters, DynamicFilter{ + Column: column, + Operator: operator, + Value: parsedValue, + }) + } + } + + if len(filters) == 0 { + return nil, nil + } + + return []FilterGroup{{ + Filters: filters, + LogicOp: "AND", + }}, nil +} + +// parseSorting parses sort parameters +// Supports format: sort=column1,-column2 (- for DESC) +func (qp *QueryParser) parseSorting(values url.Values) ([]SortField, error) { + sortParam := 
values.Get("sort") + if sortParam == "" { + return nil, nil + } + + var sorts []SortField + fields := strings.Split(sortParam, ",") + + for _, field := range fields { + field = strings.TrimSpace(field) + if field == "" { + continue + } + + order := "ASC" + column := field + + if strings.HasPrefix(field, "-") { + order = "DESC" + column = field[1:] + } else if strings.HasPrefix(field, "+") { + column = field[1:] + } + + sorts = append(sorts, SortField{ + Column: column, + Order: order, + }) + } + + return sorts, nil +} + +// ParseAdvancedFilters parses complex filter structures +// Supports nested filters and logic operators +func (qp *QueryParser) ParseAdvancedFilters(filterParam string) ([]FilterGroup, error) { + // This would be for more complex JSON-based filters + // Implementation depends on your specific needs + return nil, nil +} + +// Helper function to parse date values +func parseDate(value string) (interface{}, error) { + // Try different date formats + formats := []string{ + "2006-01-02", + "2006-01-02T15:04:05Z", + "2006-01-02T15:04:05.000Z", + "2006-01-02 15:04:05", + } + + for _, format := range formats { + if t, err := time.Parse(format, value); err == nil { + return t, nil + } + } + + return value, nil +} + +// Helper function to parse numeric values +func parseNumeric(value string) interface{} { + // Try integer first + if i, err := strconv.Atoi(value); err == nil { + return i + } + + // Try float + if f, err := strconv.ParseFloat(value, 64); err == nil { + return f + } + + // Return as string + return value +} diff --git a/internal/utils/validation/duplicate_validator.go b/internal/utils/validation/duplicate_validator.go new file mode 100644 index 0000000..863c058 --- /dev/null +++ b/internal/utils/validation/duplicate_validator.go @@ -0,0 +1,141 @@ +package validation + +import ( + "context" + "database/sql" + "fmt" + "time" +) + +// ValidationConfig holds configuration for duplicate validation +type ValidationConfig struct { + TableName string 
// duplicate_validator.go (package validation)

// ValidationConfig holds configuration for duplicate validation.
// NOTE(review): TableName and the *Column fields are interpolated directly
// into SQL text below, so they must come from trusted server-side
// configuration (e.g. DefaultRetribusiConfig) — never from request input.
type ValidationConfig struct {
	TableName        string                 // table to check
	IDColumn         string                 // column holding the record identifier
	StatusColumn     string                 // column holding the record status
	DateColumn       string                 // timestamp column used for the per-day window
	ActiveStatuses   []string               // statuses that count as "already submitted"
	AdditionalFields map[string]interface{} // extra fixed equality constraints
}

// DuplicateValidator provides methods for validating duplicate entries.
type DuplicateValidator struct {
	db *sql.DB
}

// NewDuplicateValidator creates a new instance of DuplicateValidator.
func NewDuplicateValidator(db *sql.DB) *DuplicateValidator {
	return &DuplicateValidator{db: db}
}

// ValidateDuplicate checks for duplicate entries based on the provided
// configuration: it counts rows whose IDColumn equals identifier, whose
// status is one of ActiveStatuses, and whose DateColumn falls on the
// database server's current date, and reports any match as an error.
//
// NOTE(review): `%s = ANY($2)` binds a Go []string directly; database/sql
// does not convert slices itself, so this relies on driver support (e.g.
// pq.Array with lib/pq, or pgx's native handling) — confirm against the
// driver in use. Also, DATE(...) = CURRENT_DATE uses the DB server's
// timezone; confirm that matches the intended business day.
func (dv *DuplicateValidator) ValidateDuplicate(ctx context.Context, config ValidationConfig, identifier interface{}) error {
	query := fmt.Sprintf(`
		SELECT COUNT(*)
		FROM %s
		WHERE %s = $1
		AND %s = ANY($2)
		AND DATE(%s) = CURRENT_DATE
	`, config.TableName, config.IDColumn, config.StatusColumn, config.DateColumn)

	var count int
	err := dv.db.QueryRowContext(ctx, query, identifier, config.ActiveStatuses).Scan(&count)
	if err != nil {
		return fmt.Errorf("failed to check duplicate: %w", err)
	}

	if count > 0 {
		return fmt.Errorf("data with ID %v already exists with active status today", identifier)
	}

	return nil
}

// ValidateDuplicateWithCustomFields checks for duplicates constrained by
// the config's AdditionalFields plus the caller-supplied fields map, all
// AND-joined with the status/date conditions.
//
// NOTE(review): field names from both maps are interpolated into the SQL —
// they must be trusted identifiers. Map iteration order is random, so the
// generated placeholder order varies per call; arguments are appended in
// the same order, so the query stays internally consistent. The ANY($1)
// slice-binding caveat from ValidateDuplicate applies here too.
func (dv *DuplicateValidator) ValidateDuplicateWithCustomFields(ctx context.Context, config ValidationConfig, fields map[string]interface{}) error {
	whereClause := fmt.Sprintf("%s = ANY($1) AND DATE(%s) = CURRENT_DATE", config.StatusColumn, config.DateColumn)
	args := []interface{}{config.ActiveStatuses}
	argIndex := 2

	// Add additional field conditions
	for fieldName, fieldValue := range config.AdditionalFields {
		whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex)
		args = append(args, fieldValue)
		argIndex++
	}

	// Add dynamic fields
	for fieldName, fieldValue := range fields {
		whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex)
		args = append(args, fieldValue)
		argIndex++
	}

	query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", config.TableName, whereClause)

	var count int
	err := dv.db.QueryRowContext(ctx, query, args...).Scan(&count)
	if err != nil {
		return fmt.Errorf("failed to check duplicate with custom fields: %w", err)
	}

	if count > 0 {
		return fmt.Errorf("duplicate entry found with the specified criteria")
	}

	return nil
}

// ValidateOncePerDay ensures only one submission per day for a given
// identifier, regardless of status. Identifier columns/table are
// interpolated and must be trusted (see ValidationConfig note).
func (dv *DuplicateValidator) ValidateOncePerDay(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) error {
	query := fmt.Sprintf(`
		SELECT COUNT(*)
		FROM %s
		WHERE %s = $1
		AND DATE(%s) = CURRENT_DATE
	`, tableName, idColumn, dateColumn)

	var count int
	err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&count)
	if err != nil {
		return fmt.Errorf("failed to check daily submission: %w", err)
	}

	if count > 0 {
		return fmt.Errorf("only one submission allowed per day for ID %v", identifier)
	}

	return nil
}

// GetLastSubmissionTime returns the most recent DateColumn value for the
// given identifier, or (nil, nil) when the identifier has no rows.
func (dv *DuplicateValidator) GetLastSubmissionTime(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) (*time.Time, error) {
	query := fmt.Sprintf(`
		SELECT %s
		FROM %s
		WHERE %s = $1
		ORDER BY %s DESC
		LIMIT 1
	`, dateColumn, tableName, idColumn, dateColumn)

	var lastTime time.Time
	err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&lastTime)
	if err != nil {
		if err == sql.ErrNoRows {
			return nil, nil // No previous submission
		}
		return nil, fmt.Errorf("failed to get last submission time: %w", err)
	}

	return &lastTime, nil
}

// DefaultRetribusiConfig returns default configuration for retribusi
// validation against the data_retribusi table.
func DefaultRetribusiConfig() ValidationConfig {
	return ValidationConfig{
		TableName:      "data_retribusi",
		IDColumn:       "id",
		StatusColumn:   "status",
		DateColumn:     "date_created",
		ActiveStatuses: []string{"active", "draft"},
	}
}
ActiveStatuses: []string{"active", "draft"}, + } +} diff --git a/pkg/logger/README.md b/pkg/logger/README.md new file mode 100644 index 0000000..918edda --- /dev/null +++ b/pkg/logger/README.md @@ -0,0 +1,356 @@ +# Structured Logger Package + +A comprehensive structured logging package for Go applications with support for different log levels, service-specific logging, request context, and JSON output formatting. + +## Features + +- **Structured Logging**: JSON and text format output with rich metadata +- **Multiple Log Levels**: DEBUG, INFO, WARN, ERROR, FATAL +- **Service-Specific Logging**: Dedicated loggers for different services +- **Request Context**: Request ID and correlation ID tracking +- **Performance Timing**: Built-in duration logging for operations +- **Gin Middleware**: Request logging middleware for HTTP requests +- **Environment Configuration**: Configurable via environment variables + +## Installation + +The logger is already integrated into the project. Import it using: + +```go +import "api-service/pkg/logger" +``` + +## Quick Start + +### Basic Usage + +```go +// Global functions (use default logger) +logger.Info("Application starting") +logger.Error("Something went wrong", map[string]interface{}{ + "error": err.Error(), + "code": "DB_CONNECTION_FAILED", +}) + +// Create a service-specific logger +authLogger := logger.ServiceLogger("auth-service") +authLogger.Info("User authenticated", map[string]interface{}{ + "user_id": "123", + "method": "oauth2", +}) +``` + +### Service-Specific Loggers + +```go +// Pre-defined service loggers +authLogger := logger.AuthServiceLogger() +bpjsLogger := logger.BPJSServiceLogger() +retribusiLogger := logger.RetribusiServiceLogger() +databaseLogger := logger.DatabaseServiceLogger() + +authLogger.Info("Authentication successful") +databaseLogger.Debug("Query executed", map[string]interface{}{ + "query": "SELECT * FROM users", + "time": "150ms", +}) +``` + +### Request Context Logging + +```go +// Add request 
context to logs
reqLogger := logger.Default().
    WithRequestID("req-123456").
    WithCorrelationID("corr-789012").
    WithField("user_id", "user-123")

reqLogger.Info("Request processing started", map[string]interface{}{
    "endpoint": "/api/v1/data",
    "method":   "POST",
})
```

### Performance Timing

```go
// Time operations and log duration
start := time.Now()
// ... perform operation ...
logger.LogDuration(start, "Database query completed", map[string]interface{}{
    "query":    "SELECT * FROM large_table",
    "rows":     1000,
    "database": "postgres",
})
```

## Gin Middleware Integration

### Add Request Logger Middleware

In your routes setup:

```go
import "api-service/pkg/logger"

func RegisterRoutes(cfg *config.Config) *gin.Engine {
    router := gin.New()

    // Add request logging middleware
    router.Use(logger.RequestLoggerMiddleware(logger.Default()))

    // ... other middleware and routes
    return router
}
```

### Access Logger in Handlers

```go
func (h *MyHandler) MyEndpoint(c *gin.Context) {
    // Get the request-scoped logger from the context. Name it something
    // other than "logger" so the package itself stays accessible below.
    log := logger.GetLoggerFromContext(c)

    log.Info("Endpoint called", map[string]interface{}{
        "user_agent": c.Request.UserAgent(),
        "client_ip":  c.ClientIP(),
    })

    // Get request IDs
    requestID := logger.GetRequestIDFromContext(c)
    correlationID := logger.GetCorrelationIDFromContext(c)
}
```

## Configuration

### Environment Variables

Set these environment variables to configure the logger (the names below
match what `pkg/logger/config.go` actually reads):

```bash
# Log level (DEBUG, INFO, WARN, ERROR, FATAL)
LOG_LEVEL=INFO

# Enable JSON output format (true/false)
LOG_JSON_FORMAT=false

# Service name for logs
LOG_SERVICE_NAME=api-service
```

### Programmatic Configuration

```go
// Create custom logger with specific configuration
cfg := logger.Config{
    Level:      "DEBUG",
    JSONFormat: true,
    Service:    "my-custom-service",
}

customLogger := logger.NewFromConfig(cfg)

// Or create manually
manualLogger := 
logger.New("service-name", logger.DEBUG, true) +``` + +## Log Levels + +| Level | Description | Usage | +|-------|-------------|-------| +| DEBUG | Detailed debug information | Development and troubleshooting | +| INFO | General operational messages | Normal application behavior | +| WARN | Warning conditions | Something unexpected but not an error | +| ERROR | Error conditions | Operation failed but application continues | +| FATAL | Critical conditions | Application cannot continue | + +## Output Formats + +### Text Format (Default) +``` +2025-08-22T04:33:12+07:00 [INFO] auth-service: User authentication successful (handler/auth.go:45) [user_id=12345 method=oauth2] +``` + +### JSON Format +```json +{ + "timestamp": "2025-08-22T04:33:12+07:00", + "level": "INFO", + "service": "auth-service", + "message": "User authentication successful", + "file": "handler/auth.go", + "line": 45, + "request_id": "req-123456", + "correlation_id": "corr-789012", + "fields": { + "user_id": "12345", + "method": "oauth2" + } +} +``` + +## Best Practices + +### 1. Use Appropriate Log Levels +```go +// Good +logger.Debug("Detailed debug info") +logger.Info("User action completed") +logger.Warn("Rate limit approaching") +logger.Error("Database connection failed") + +// Avoid +logger.Info("Error connecting to database") // Use ERROR instead +``` + +### 2. Add Context to Logs +```go +// Instead of this: +logger.Error("Login failed") + +// Do this: +logger.Error("Login failed", map[string]interface{}{ + "username": username, + "reason": "invalid_credentials", + "attempts": loginAttempts, + "client_ip": clientIP, +}) +``` + +### 3. Use Service-Specific Loggers +```go +// Create once per service +var authLogger = logger.AuthServiceLogger() + +func LoginHandler(c *gin.Context) { + authLogger.Info("Login attempt", map[string]interface{}{ + "username": c.PostForm("username"), + }) +} +``` + +### 4. 
Measure Performance +```go +func ProcessData(data []byte) error { + start := time.Now() + defer func() { + logger.LogDuration(start, "Data processing completed", map[string]interface{}{ + "data_size": len(data), + "items": countItems(data), + }) + }() + + // ... processing logic ... +} +``` + +## Migration from Standard Log Package + +### Before (standard log) +```go +import "log" + +log.Printf("Error: %v", err) +log.Printf("User %s logged in", username) +``` + +### After (structured logger) +```go +import "api-service/pkg/logger" + +logger.Error("Operation failed", map[string]interface{}{ + "error": err.Error(), + "context": "user_login", +}) + +logger.Info("User logged in", map[string]interface{}{ + "username": username, + "method": "password", +}) +``` + +## Examples + +### Database Operations +```go +func (h *UserHandler) GetUser(c *gin.Context) { + logger := logger.GetLoggerFromContext(c) + start := time.Now() + + user, err := h.db.GetUser(c.Param("id")) + if err != nil { + logger.Error("Failed to get user", map[string]interface{}{ + "user_id": c.Param("id"), + "error": err.Error(), + }) + c.JSON(500, gin.H{"error": "Internal server error"}) + return + } + + logger.LogDuration(start, "User retrieved successfully", map[string]interface{}{ + "user_id": user.ID, + "query_time": time.Since(start).String(), + }) + + c.JSON(200, user) +} +``` + +### Authentication Service +```go +var authLogger = logger.AuthServiceLogger() + +func Authenticate(username, password string) (bool, error) { + authLogger.Debug("Authentication attempt", map[string]interface{}{ + "username": username, + }) + + // Authentication logic... 
+ + if authenticated { + authLogger.Info("Authentication successful", map[string]interface{}{ + "username": username, + "method": "password", + }) + return true, nil + } + + authLogger.Warn("Authentication failed", map[string]interface{}{ + "username": username, + "reason": "invalid_credentials", + }) + return false, nil +} +``` + +## Troubleshooting + +### Common Issues + +1. **No logs appearing**: Check that log level is not set too high (e.g., ERROR when logging INFO) +2. **JSON format not working**: Ensure `LOG_JSON=true` or logger is created with `jsonFormat: true` +3. **Missing context**: Use `WithRequestID()` and `WithCorrelationID()` for request context + +### Debug Mode + +Enable debug logging for development: + +```bash +export LOG_LEVEL=DEBUG +export LOG_FORMAT=text +``` + +## Performance Considerations + +- Logger is designed to be lightweight and fast +- Context fields are only evaluated when the log level is enabled +- JSON marshaling only occurs when JSON format is enabled +- Consider log volume in production environments + +## License + +This logger package is part of the API Service project. 
diff --git a/pkg/logger/config.go b/pkg/logger/config.go new file mode 100644 index 0000000..68f69d1 --- /dev/null +++ b/pkg/logger/config.go @@ -0,0 +1,137 @@ +package logger + +import ( + "os" + "strconv" + "strings" +) + +// Config holds the configuration for the logger +type Config struct { + Level string `json:"level" default:"INFO"` + JSONFormat bool `json:"json_format" default:"false"` + Service string `json:"service" default:"api-service"` +} + +// DefaultConfig returns the default logger configuration +func DefaultConfig() Config { + return Config{ + Level: "INFO", + JSONFormat: false, + Service: "api-service", + } +} + +// LoadConfigFromEnv loads logger configuration from environment variables +func LoadConfigFromEnv() Config { + config := DefaultConfig() + + // Load log level from environment + if level := os.Getenv("LOG_LEVEL"); level != "" { + config.Level = strings.ToUpper(level) + } + + // Load JSON format from environment + if jsonFormat := os.Getenv("LOG_JSON_FORMAT"); jsonFormat != "" { + if parsed, err := strconv.ParseBool(jsonFormat); err == nil { + config.JSONFormat = parsed + } + } + + // Load service name from environment + if service := os.Getenv("LOG_SERVICE_NAME"); service != "" { + config.Service = service + } + + return config +} + +// Validate validates the logger configuration +func (c *Config) Validate() error { + // Validate log level + validLevels := map[string]bool{ + "DEBUG": true, + "INFO": true, + "WARN": true, + "ERROR": true, + "FATAL": true, + } + + if !validLevels[c.Level] { + c.Level = "INFO" // Default to INFO if invalid + } + + return nil +} + +// GetLogLevel returns the LogLevel from the configuration +func (c *Config) GetLogLevel() LogLevel { + switch strings.ToUpper(c.Level) { + case "DEBUG": + return DEBUG + case "WARN": + return WARN + case "ERROR": + return ERROR + case "FATAL": + return FATAL + default: + return INFO + } +} + +// CreateLoggerFromConfig creates a new logger instance from configuration +func 
CreateLoggerFromConfig(cfg Config) *Logger { + cfg.Validate() + return NewFromConfig(cfg) +} + +// CreateLoggerFromEnv creates a new logger instance from environment variables +func CreateLoggerFromEnv() *Logger { + cfg := LoadConfigFromEnv() + return CreateLoggerFromConfig(cfg) +} + +// Environment variable constants +const ( + EnvLogLevel = "LOG_LEVEL" + EnvLogJSONFormat = "LOG_JSON_FORMAT" + EnvLogService = "LOG_SERVICE_NAME" +) + +// Service-specific configuration helpers + +// AuthServiceConfig returns configuration for auth service +func AuthServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "auth-service" + return cfg +} + +// BPJSServiceConfig returns configuration for BPJS service +func BPJSServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "bpjs-service" + return cfg +} + +// RetribusiServiceConfig returns configuration for retribusi service +func RetribusiServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "retribusi-service" + return cfg +} + +// DatabaseServiceConfig returns configuration for database service +func DatabaseServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "database-service" + return cfg +} + +// MiddlewareServiceConfig returns configuration for middleware service +func MiddlewareServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "middleware-service" + return cfg +} diff --git a/pkg/logger/context.go b/pkg/logger/context.go new file mode 100644 index 0000000..3eb52bf --- /dev/null +++ b/pkg/logger/context.go @@ -0,0 +1,142 @@ +package logger + +import ( + "context" + "time" +) + +// contextKey is a custom type for context keys to avoid collisions +type contextKey string + +const ( + loggerKey contextKey = "logger" + requestIDKey contextKey = "request_id" + correlationIDKey contextKey = "correlation_id" + serviceNameKey contextKey = "service_name" +) + +// ContextWithLogger creates a new context with the logger +func ContextWithLogger(ctx 
context.Context, logger *Logger) context.Context { + return context.WithValue(ctx, loggerKey, logger) +} + +// LoggerFromContext retrieves the logger from context +func LoggerFromContext(ctx context.Context) *Logger { + if logger, ok := ctx.Value(loggerKey).(*Logger); ok { + return logger + } + return globalLogger +} + +// ContextWithRequestID creates a new context with the request ID +func ContextWithRequestID(ctx context.Context, requestID string) context.Context { + return context.WithValue(ctx, requestIDKey, requestID) +} + +// RequestIDFromContext retrieves the request ID from context +func RequestIDFromContext(ctx context.Context) string { + if requestID, ok := ctx.Value(requestIDKey).(string); ok { + return requestID + } + return "" +} + +// ContextWithCorrelationID creates a new context with the correlation ID +func ContextWithCorrelationID(ctx context.Context, correlationID string) context.Context { + return context.WithValue(ctx, correlationIDKey, correlationID) +} + +// CorrelationIDFromContext retrieves the correlation ID from context +func CorrelationIDFromContext(ctx context.Context) string { + if correlationID, ok := ctx.Value(correlationIDKey).(string); ok { + return correlationID + } + return "" +} + +// ContextWithServiceName creates a new context with the service name +func ContextWithServiceName(ctx context.Context, serviceName string) context.Context { + return context.WithValue(ctx, serviceNameKey, serviceName) +} + +// ServiceNameFromContext retrieves the service name from context +func ServiceNameFromContext(ctx context.Context) string { + if serviceName, ok := ctx.Value(serviceNameKey).(string); ok { + return serviceName + } + return "" +} + +// WithContext returns a new logger with context values +func (l *Logger) WithContext(ctx context.Context) *Logger { + logger := l + + if requestID := RequestIDFromContext(ctx); requestID != "" { + logger = logger.WithRequestID(requestID) + } + + if correlationID := CorrelationIDFromContext(ctx); 
correlationID != "" { + logger = logger.WithCorrelationID(correlationID) + } + + if serviceName := ServiceNameFromContext(ctx); serviceName != "" { + logger = logger.WithService(serviceName) + } + + return logger +} + +// DebugCtx logs a debug message with context +func DebugCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Debug(msg, fields...) +} + +// DebugfCtx logs a formatted debug message with context +func DebugfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Debugf(format, args...) +} + +// InfoCtx logs an info message with context +func InfoCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Info(msg, fields...) +} + +// InfofCtx logs a formatted info message with context +func InfofCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Infof(format, args...) +} + +// WarnCtx logs a warning message with context +func WarnCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Warn(msg, fields...) +} + +// WarnfCtx logs a formatted warning message with context +func WarnfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Warnf(format, args...) +} + +// ErrorCtx logs an error message with context +func ErrorCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Error(msg, fields...) +} + +// ErrorfCtx logs a formatted error message with context +func ErrorfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Errorf(format, args...) 
+} + +// FatalCtx logs a fatal message with context and exits the program +func FatalCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Fatal(msg, fields...) +} + +// FatalfCtx logs a formatted fatal message with context and exits the program +func FatalfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Fatalf(format, args...) +} + +// LogDurationCtx logs the duration of an operation with context +func LogDurationCtx(ctx context.Context, start time.Time, operation string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).LogDuration(start, operation, fields...) +} diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go new file mode 100644 index 0000000..bcdd59e --- /dev/null +++ b/pkg/logger/logger.go @@ -0,0 +1,616 @@ +package logger + +import ( + "encoding/json" + "fmt" + "log" + "os" + "path/filepath" + "runtime" + "strings" + "sync" + "time" +) + +// LogLevel represents the severity level of a log message +type LogLevel int + +const ( + DEBUG LogLevel = iota + INFO + WARN + ERROR + FATAL +) + +var ( + levelStrings = map[LogLevel]string{ + DEBUG: "DEBUG", + INFO: "INFO", + WARN: "WARN", + ERROR: "ERROR", + FATAL: "FATAL", + } + + stringLevels = map[string]LogLevel{ + "DEBUG": DEBUG, + "INFO": INFO, + "WARN": WARN, + "ERROR": ERROR, + "FATAL": FATAL, + } +) + +// Logger represents a structured logger instance +type Logger struct { + serviceName string + level LogLevel + output *log.Logger + mu sync.Mutex + jsonFormat bool + + logDir string +} + +// LogEntry represents a structured log entry +type LogEntry struct { + Timestamp string `json:"timestamp"` + Level string `json:"level"` + Service string `json:"service"` + Message string `json:"message"` + RequestID string `json:"request_id,omitempty"` + CorrelationID string `json:"correlation_id,omitempty"` + File string `json:"file,omitempty"` + Line int 
`json:"line,omitempty"` + Duration string `json:"duration,omitempty"` + Fields map[string]interface{} `json:"fields,omitempty"` +} + +// New creates a new logger instance +func New(serviceName string, level LogLevel, jsonFormat bool, logDir ...string) *Logger { + // Tentukan direktori log berdasarkan prioritas: + // 1. Parameter logDir (jika disediakan) + // 2. Environment variable LOG_DIR (jika ada) + // 3. Default ke pkg/logger/data relatif terhadap root proyek + + var finalLogDir string + + // Cek apakah logDir disediakan sebagai parameter + if len(logDir) > 0 && logDir[0] != "" { + finalLogDir = logDir[0] + } else { + // Cek environment variable + if envLogDir := os.Getenv("LOG_DIR"); envLogDir != "" { + finalLogDir = envLogDir + } else { + // Default: dapatkan path relatif terhadap root proyek + // Dapatkan path executable + exePath, err := os.Executable() + if err != nil { + // Fallback ke current working directory jika gagal + finalLogDir = filepath.Join(".", "pkg", "logger", "data") + } else { + // Dapatkan direktori executable + exeDir := filepath.Dir(exePath) + + // Jika berjalan dengan go run, executable ada di temp directory + // Coba dapatkan path source code + if strings.Contains(exeDir, "go-build") || strings.Contains(exeDir, "tmp") { + // Gunakan runtime.Caller untuk mendapatkan path source + _, file, _, ok := runtime.Caller(0) + if ok { + // Dapatkan direktori source (2 level up dari pkg/logger) + sourceDir := filepath.Dir(file) + for i := 0; i < 3; i++ { // Naik 3 level ke root proyek + sourceDir = filepath.Dir(sourceDir) + } + finalLogDir = filepath.Join(sourceDir, "pkg", "logger", "data") + } else { + // Fallback + finalLogDir = filepath.Join(".", "pkg", "logger", "data") + } + } else { + // Untuk binary yang sudah dikompilasi, asumsikan struktur proyek + finalLogDir = filepath.Join(exeDir, "pkg", "logger", "data") + } + } + } + } + + // Konversi ke path absolut + absPath, err := filepath.Abs(finalLogDir) + if err == nil { + finalLogDir = 
absPath + } + + // Buat direktori jika belum ada + if err := os.MkdirAll(finalLogDir, 0755); err != nil { + // Fallback ke stdout jika gagal membuat direktori + fmt.Printf("Warning: Failed to create log directory %s: %v\n", finalLogDir, err) + return &Logger{ + serviceName: serviceName, + level: level, + output: log.New(os.Stdout, "", 0), + jsonFormat: jsonFormat, + logDir: "", // Kosongkan karena gagal + } + } + + return &Logger{ + serviceName: serviceName, + level: level, + output: log.New(os.Stdout, "", 0), + jsonFormat: jsonFormat, + logDir: finalLogDir, + } +} + +// NewFromConfig creates a new logger from configuration +func NewFromConfig(cfg Config) *Logger { + level := INFO + if l, exists := stringLevels[strings.ToUpper(cfg.Level)]; exists { + level = l + } + + return New(cfg.Service, level, cfg.JSONFormat) +} + +// Default creates a default logger instance +func Default() *Logger { + return New("api-service", INFO, false) +} + +// WithService returns a new logger with the specified service name +func (l *Logger) WithService(serviceName string) *Logger { + return &Logger{ + serviceName: serviceName, + level: l.level, + output: l.output, + jsonFormat: l.jsonFormat, + logDir: l.logDir, + } +} + +// SetLevel sets the log level for the logger +func (l *Logger) SetLevel(level LogLevel) { + l.mu.Lock() + defer l.mu.Unlock() + l.level = level +} + +// SetJSONFormat sets whether to output logs in JSON format +func (l *Logger) SetJSONFormat(jsonFormat bool) { + l.mu.Lock() + defer l.mu.Unlock() + l.jsonFormat = jsonFormat +} + +// Debug logs a debug message +func (l *Logger) Debug(msg string, fields ...map[string]interface{}) { + l.log(DEBUG, msg, nil, fields...) +} + +// Debugf logs a formatted debug message +func (l *Logger) Debugf(format string, args ...interface{}) { + l.log(DEBUG, fmt.Sprintf(format, args...), nil) +} + +// Info logs an info message +func (l *Logger) Info(msg string, fields ...map[string]interface{}) { + l.log(INFO, msg, nil, fields...) 
+} + +// Infof logs a formatted info message +func (l *Logger) Infof(format string, args ...interface{}) { + l.log(INFO, fmt.Sprintf(format, args...), nil) +} + +// Warn logs a warning message +func (l *Logger) Warn(msg string, fields ...map[string]interface{}) { + l.log(WARN, msg, nil, fields...) +} + +// Warnf logs a formatted warning message +func (l *Logger) Warnf(format string, args ...interface{}) { + l.log(WARN, fmt.Sprintf(format, args...), nil) +} + +// Error logs an error message +func (l *Logger) Error(msg string, fields ...map[string]interface{}) { + l.log(ERROR, msg, nil, fields...) +} + +// Errorf logs a formatted error message +func (l *Logger) Errorf(format string, args ...interface{}) { + l.log(ERROR, fmt.Sprintf(format, args...), nil) +} + +// Fatal logs a fatal message and exits the program +func (l *Logger) Fatal(msg string, fields ...map[string]interface{}) { + l.log(FATAL, msg, nil, fields...) + os.Exit(1) +} + +// Fatalf logs a formatted fatal message and exits the program +func (l *Logger) Fatalf(format string, args ...interface{}) { + l.log(FATAL, fmt.Sprintf(format, args...), nil) + os.Exit(1) +} + +// WithRequestID returns a new logger with the specified request ID +func (l *Logger) WithRequestID(requestID string) *Logger { + return l.withField("request_id", requestID) +} + +// WithCorrelationID returns a new logger with the specified correlation ID +func (l *Logger) WithCorrelationID(correlationID string) *Logger { + return l.withField("correlation_id", correlationID) +} + +// WithField returns a new logger with an additional field +func (l *Logger) WithField(key string, value interface{}) *Logger { + return l.withField(key, value) +} + +// WithFields returns a new logger with additional fields +func (l *Logger) WithFields(fields map[string]interface{}) *Logger { + return &Logger{ + serviceName: l.serviceName, + level: l.level, + output: l.output, + jsonFormat: l.jsonFormat, + logDir: l.logDir, + } +} + +// LogDuration logs the duration 
of an operation +func (l *Logger) LogDuration(start time.Time, operation string, fields ...map[string]interface{}) { + duration := time.Since(start) + l.Info(fmt.Sprintf("%s completed", operation), append(fields, map[string]interface{}{ + "duration": duration.String(), + "duration_ms": duration.Milliseconds(), + })...) +} + +// log is the internal logging method +func (l *Logger) log(level LogLevel, msg string, duration *time.Duration, fields ...map[string]interface{}) { + if level < l.level { + return + } + + // Get caller information + _, file, line, ok := runtime.Caller(3) // Adjust caller depth + var callerFile string + var callerLine int + if ok { + // Shorten file path + parts := strings.Split(file, "/") + if len(parts) > 2 { + callerFile = strings.Join(parts[len(parts)-2:], "/") + } else { + callerFile = file + } + callerLine = line + } + + // Merge all fields + mergedFields := make(map[string]interface{}) + for _, f := range fields { + for k, v := range f { + mergedFields[k] = v + } + } + + entry := LogEntry{ + Timestamp: time.Now().Format(time.RFC3339), + Level: levelStrings[level], + Service: l.serviceName, + Message: msg, + File: callerFile, + Line: callerLine, + Fields: mergedFields, + } + + if duration != nil { + entry.Duration = duration.String() + } + + if l.jsonFormat { + l.outputJSON(entry) + } else { + l.outputText(entry) + } + + if level == FATAL { + os.Exit(1) + } +} + +// outputJSON outputs the log entry in JSON format +func (l *Logger) outputJSON(entry LogEntry) { + jsonData, err := json.Marshal(entry) + if err != nil { + // Fallback to text output if JSON marshaling fails + l.outputText(entry) + return + } + l.output.Println(string(jsonData)) +} + +// outputText outputs the log entry in text format +func (l *Logger) outputText(entry LogEntry) { + timestamp := entry.Timestamp + level := entry.Level + service := entry.Service + message := entry.Message + + // Base log line + logLine := fmt.Sprintf("%s [%s] %s: %s", timestamp, level, service, 
message) + + // Add file and line if available + if entry.File != "" && entry.Line > 0 { + logLine += fmt.Sprintf(" (%s:%d)", entry.File, entry.Line) + } + + // Add request ID if available + if entry.RequestID != "" { + logLine += fmt.Sprintf(" [req:%s]", entry.RequestID) + } + + // Add correlation ID if available + if entry.CorrelationID != "" { + logLine += fmt.Sprintf(" [corr:%s]", entry.CorrelationID) + } + + // Add duration if available + if entry.Duration != "" { + logLine += fmt.Sprintf(" [dur:%s]", entry.Duration) + } + + // Add additional fields + if len(entry.Fields) > 0 { + fields := make([]string, 0, len(entry.Fields)) + for k, v := range entry.Fields { + fields = append(fields, fmt.Sprintf("%s=%v", k, v)) + } + logLine += " [" + strings.Join(fields, " ") + "]" + } + + l.output.Println(logLine) +} + +// withField creates a new logger with an additional field +func (l *Logger) withField(key string, value interface{}) *Logger { + return &Logger{ + serviceName: l.serviceName, + level: l.level, + output: l.output, + jsonFormat: l.jsonFormat, + logDir: l.logDir, + } +} + +// String returns the string representation of a log level +func (l LogLevel) String() string { + return levelStrings[l] +} + +// ParseLevel parses a string into a LogLevel +func ParseLevel(level string) (LogLevel, error) { + if l, exists := stringLevels[strings.ToUpper(level)]; exists { + return l, nil + } + return INFO, fmt.Errorf("invalid log level: %s", level) +} + +// Global logger instance +var globalLogger = Default() + +// SetGlobalLogger sets the global logger instance +func SetGlobalLogger(logger *Logger) { + globalLogger = logger +} + +// Global logging functions +func Debug(msg string, fields ...map[string]interface{}) { + globalLogger.Debug(msg, fields...) +} + +func Debugf(format string, args ...interface{}) { + globalLogger.Debugf(format, args...) +} + +func Info(msg string, fields ...map[string]interface{}) { + globalLogger.Info(msg, fields...) 
+} + +func Infof(format string, args ...interface{}) { + globalLogger.Infof(format, args...) +} + +func Warn(msg string, fields ...map[string]interface{}) { + globalLogger.Warn(msg, fields...) +} + +func Warnf(format string, args ...interface{}) { + globalLogger.Warnf(format, args...) +} + +func Error(msg string, fields ...map[string]interface{}) { + globalLogger.Error(msg, fields...) +} + +func Errorf(format string, args ...interface{}) { + globalLogger.Errorf(format, args...) +} + +func Fatal(msg string, fields ...map[string]interface{}) { + globalLogger.Fatal(msg, fields...) +} + +func Fatalf(format string, args ...interface{}) { + globalLogger.Fatalf(format, args...) +} + +// SaveLogText menyimpan log dalam format teks dengan pemisah | +func (l *Logger) SaveLogText(entry LogEntry) error { + // Format log dengan pemisah | + logLine := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%s|%s:%d", + entry.Timestamp, + entry.Level, + entry.Service, + entry.Message, + entry.RequestID, + entry.CorrelationID, + entry.Duration, + entry.File, + entry.Line) + + // Tambahkan fields jika ada + if len(entry.Fields) > 0 { + fieldsStr := "" + for k, v := range entry.Fields { + fieldsStr += fmt.Sprintf("|%s=%v", k, v) + } + logLine += fieldsStr + } + logLine += "\n" + + // Buat direktori jika belum ada + if err := os.MkdirAll(l.logDir, 0755); err != nil { + return err + } + + // Tulis ke file dengan mutex lock untuk concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "logs.txt") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(logLine); err != nil { + return err + } + return nil +} + +// SaveLogJSON menyimpan log dalam format JSON +func (l *Logger) SaveLogJSON(entry LogEntry) error { + jsonData, err := json.Marshal(entry) + if err != nil { + return err + } + + // Buat direktori jika belum ada + if err := os.MkdirAll(l.logDir, 0755); err != nil 
{ + return err + } + + // Tulis ke file dengan mutex lock for concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "logs.json") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(string(jsonData) + "\n"); err != nil { + return err + } + return nil +} + +// SaveLogToDatabase menyimpan log ke database +func (l *Logger) SaveLogToDatabase(entry LogEntry) error { + // Implementasi penyimpanan ke database + // Ini adalah contoh implementasi, sesuaikan dengan struktur database Anda + + // Untuk saat ini, kita akan simpan ke file sebagai placeholder + // Anda dapat mengganti ini dengan koneksi database yang sesuai + dbLogLine := fmt.Sprintf("DB_LOG: %s|%s|%s|%s\n", + entry.Timestamp, entry.Level, entry.Service, entry.Message) + + if err := os.MkdirAll(l.logDir, 0755); err != nil { + return err + } + + // Tulis ke file dengan mutex lock for concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "database_logs.txt") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(dbLogLine); err != nil { + return err + } + return nil +} + +// LogAndSave melakukan logging dan menyimpan ke semua format +func (l *Logger) LogAndSave(level LogLevel, msg string, fields ...map[string]interface{}) { + // Panggil fungsi log biasa + l.log(level, msg, nil, fields...) 
+ + // Dapatkan entry log yang baru dibuat + _, file, line, ok := runtime.Caller(2) + var callerFile string + var callerLine int + if ok { + parts := strings.Split(file, "/") + if len(parts) > 2 { + callerFile = strings.Join(parts[len(parts)-2:], "/") + } else { + callerFile = file + } + callerLine = line + } + + mergedFields := make(map[string]interface{}) + for _, f := range fields { + for k, v := range f { + mergedFields[k] = v + } + } + + entry := LogEntry{ + Timestamp: time.Now().Format(time.RFC3339), + Level: levelStrings[level], + Service: l.serviceName, + Message: msg, + File: callerFile, + Line: callerLine, + Fields: mergedFields, + } + + // Simpan ke semua format + go func() { + l.SaveLogText(entry) + l.SaveLogJSON(entry) + l.SaveLogToDatabase(entry) + }() +} + +// Global fungsi untuk menyimpan log +func SaveLogText(entry LogEntry) error { + return globalLogger.SaveLogText(entry) +} + +func SaveLogJSON(entry LogEntry) error { + return globalLogger.SaveLogJSON(entry) +} + +func SaveLogToDatabase(entry LogEntry) error { + return globalLogger.SaveLogToDatabase(entry) +} diff --git a/pkg/logger/middleware.go b/pkg/logger/middleware.go new file mode 100644 index 0000000..d063a83 --- /dev/null +++ b/pkg/logger/middleware.go @@ -0,0 +1,191 @@ +package logger + +import ( + "bytes" + "io" + "strings" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" +) + +// RequestLoggerMiddleware creates a Gin middleware for request logging +func RequestLoggerMiddleware(logger *Logger) gin.HandlerFunc { + return func(c *gin.Context) { + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + // Get correlation ID + correlationID := c.GetHeader("X-Correlation-ID") + if correlationID == "" { + correlationID = uuid.New().String() + c.Header("X-Correlation-ID", correlationID) + } + + // Create request-scoped logger + reqLogger := logger. 
+ WithRequestID(requestID). + WithCorrelationID(correlationID) + + // Store logger in context + c.Set("logger", reqLogger) + c.Set("request_id", requestID) + c.Set("correlation_id", correlationID) + + // Capture request body for logging if needed + var requestBody []byte + if c.Request.Body != nil && strings.HasPrefix(c.ContentType(), "application/json") { + requestBody, _ = io.ReadAll(c.Request.Body) + c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody)) + } + + // Start timer + start := time.Now() + + // Log request start + reqLogger.Info("Request started", map[string]interface{}{ + "method": c.Request.Method, + "path": c.Request.URL.Path, + "query": c.Request.URL.RawQuery, + "remote_addr": c.Request.RemoteAddr, + "user_agent": c.Request.UserAgent(), + "content_type": c.ContentType(), + "body_size": len(requestBody), + }) + + // Process request + c.Next() + + // Calculate duration + duration := time.Since(start) + + // Get response status + status := c.Writer.Status() + responseSize := c.Writer.Size() + + // Log level based on status code + var logLevel LogLevel + switch { + case status >= 500: + logLevel = ERROR + case status >= 400: + logLevel = WARN + default: + logLevel = INFO + } + + // Log request completion + fields := map[string]interface{}{ + "method": c.Request.Method, + "path": c.Request.URL.Path, + "status": status, + "duration": duration.String(), + "duration_ms": duration.Milliseconds(), + "response_size": responseSize, + "client_ip": c.ClientIP(), + "user_agent": c.Request.UserAgent(), + "content_type": c.ContentType(), + "content_length": c.Request.ContentLength, + } + + // Add query parameters if present + if c.Request.URL.RawQuery != "" { + fields["query"] = c.Request.URL.RawQuery + } + + // Add error information if present + if len(c.Errors) > 0 { + errors := make([]string, len(c.Errors)) + for i, err := range c.Errors { + errors[i] = err.Error() + } + fields["errors"] = errors + } + + reqLogger.log(logLevel, "Request completed", 
&duration, fields) + } +} + +// GetLoggerFromContext retrieves the logger from Gin context +func GetLoggerFromContext(c *gin.Context) *Logger { + if logger, exists := c.Get("logger"); exists { + if l, ok := logger.(*Logger); ok { + return l + } + } + return globalLogger +} + +// GetRequestIDFromContext retrieves the request ID from Gin context +func GetRequestIDFromContext(c *gin.Context) string { + if requestID, exists := c.Get("request_id"); exists { + if id, ok := requestID.(string); ok { + return id + } + } + return "" +} + +// GetCorrelationIDFromContext retrieves the correlation ID from Gin context +func GetCorrelationIDFromContext(c *gin.Context) string { + if correlationID, exists := c.Get("correlation_id"); exists { + if id, ok := correlationID.(string); ok { + return id + } + } + return "" +} + +// DatabaseLoggerMiddleware creates middleware for database operation logging +func DatabaseLoggerMiddleware(logger *Logger, serviceName string) gin.HandlerFunc { + return func(c *gin.Context) { + reqLogger := GetLoggerFromContext(c).WithService(serviceName) + c.Set("db_logger", reqLogger) + c.Next() + } +} + +// GetDBLoggerFromContext retrieves the database logger from Gin context +func GetDBLoggerFromContext(c *gin.Context) *Logger { + if logger, exists := c.Get("db_logger"); exists { + if l, ok := logger.(*Logger); ok { + return l + } + } + return GetLoggerFromContext(c) +} + +// ServiceLogger creates a service-specific logger +func ServiceLogger(serviceName string) *Logger { + return globalLogger.WithService(serviceName) +} + +// AuthServiceLogger returns a logger for auth service +func AuthServiceLogger() *Logger { + return ServiceLogger("auth-service") +} + +// BPJSServiceLogger returns a logger for BPJS service +func BPJSServiceLogger() *Logger { + return ServiceLogger("bpjs-service") +} + +// RetribusiServiceLogger returns a logger for retribusi service +func RetribusiServiceLogger() *Logger { + return ServiceLogger("retribusi-service") +} + +// 
DatabaseServiceLogger returns a logger for database operations +func DatabaseServiceLogger() *Logger { + return ServiceLogger("database-service") +} + +// MiddlewareServiceLogger returns a logger for middleware operations +func MiddlewareServiceLogger() *Logger { + return ServiceLogger("middleware-service") +} diff --git a/pkg/utils/etag.go b/pkg/utils/etag.go new file mode 100644 index 0000000..eeba954 --- /dev/null +++ b/pkg/utils/etag.go @@ -0,0 +1,54 @@ +package utils + +import ( + "fmt" + "strings" +) + +// ParseETag extracts the ETag value from HTTP ETag header +// Handles both strong ETags ("123") and weak ETags (W/"123") +func ParseETag(etag string) string { + if etag == "" { + return "" + } + + // Remove W/ prefix for weak ETags + if strings.HasPrefix(etag, "W/") { + etag = etag[2:] + } + + // Remove surrounding quotes + if len(etag) >= 2 && strings.HasPrefix(etag, "\"") && strings.HasSuffix(etag, "\"") { + etag = etag[1 : len(etag)-1] + } + + return etag +} + +// FormatETag formats a version ID into a proper HTTP ETag header value +func FormatETag(versionId string, weak bool) string { + if versionId == "" { + return "" + } + + if weak { + return fmt.Sprintf(`W/"%s"`, versionId) + } + + return fmt.Sprintf(`"%s"`, versionId) +} + +// IsValidETag validates if the given string is a valid ETag format +func IsValidETag(etag string) bool { + if etag == "" { + return false + } + + // Check for weak ETag format + if strings.HasPrefix(etag, "W/") { + etag = etag[2:] + } + + // Must be quoted + return len(etag) >= 2 && strings.HasPrefix(etag, "\"") && strings.HasSuffix(etag, "\"") +} diff --git a/pkg/utils/utils b/pkg/utils/utils new file mode 100644 index 0000000..e69de29 diff --git a/pkg/validator/validator b/pkg/validator/validator new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/pkg/validator/validator @@ -0,0 +1 @@ + diff --git a/scripts/scripts b/scripts/scripts new file mode 100644 index 0000000..e69de29 diff --git 
a/tools/general/generate-handler.go b/tools/general/generate-handler.go new file mode 100644 index 0000000..bddbdcb --- /dev/null +++ b/tools/general/generate-handler.go @@ -0,0 +1,1740 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "time" +) + +// HandlerData contains template data for handler generation +type HandlerData struct { + Name string + NameLower string + NamePlural string + Category string // Untuk backward compatibility (bagian pertama) + DirPath string // Path direktori lengkap + ModuleName string + TableName string + HasGet bool + HasPost bool + HasPut bool + HasDelete bool + HasStats bool + HasDynamic bool + HasSearch bool + HasFilter bool + HasPagination bool + Timestamp string +} + +type PathInfo struct { + Category string + EntityName string + DirPath string + FilePath string +} + +// parseEntityPath - Logic parsing yang diperbaiki +func parseEntityPath(entityPath string) (*PathInfo, error) { + if strings.TrimSpace(entityPath) == "" { + return nil, fmt.Errorf("entity path cannot be empty") + } + var pathInfo PathInfo + parts := strings.Split(entityPath, "/") + // Validasi minimal 1 bagian (file saja) dan maksimal 4 + if len(parts) < 1 || len(parts) > 4 { + return nil, fmt.Errorf("invalid path format: use up to 4 levels like 'level1/level2/level3/entity'") + } + // Validasi bagian kosong + for i, part := range parts { + if strings.TrimSpace(part) == "" { + return nil, fmt.Errorf("empty path segment at position %d", i+1) + } + } + + pathInfo.EntityName = parts[len(parts)-1] + if len(parts) > 1 { + pathInfo.Category = parts[len(parts)-2] + pathInfo.DirPath = strings.Join(parts[:len(parts)-1], "/") + pathInfo.FilePath = pathInfo.DirPath + "/" + strings.ToLower(pathInfo.EntityName) + ".go" + } else { + pathInfo.Category = "models" + pathInfo.DirPath = "" + pathInfo.FilePath = strings.ToLower(pathInfo.EntityName) + ".go" + } + return &pathInfo, nil +} + +// validateMethods - Validasi method yang diinput +func 
validateMethods(methods []string) error { + validMethods := map[string]bool{ + "get": true, "post": true, "put": true, "delete": true, + "stats": true, "dynamic": true, "search": true, + } + + for _, method := range methods { + if !validMethods[strings.ToLower(method)] { + return fmt.Errorf("invalid method: %s. Valid methods: get, post, put, delete, stats, dynamic, search", method) + } + } + return nil +} + +// generateTableName - Generate table name berdasarkan path lengkap +func generateTableName(pathInfo *PathInfo) string { + entityLower := strings.ToLower(pathInfo.EntityName) + + if pathInfo.DirPath != "" { + // Replace "/" dengan "_" untuk table name + pathForTable := strings.ReplaceAll(pathInfo.DirPath, "/", "_") + return "data_" + pathForTable + "_" + entityLower + } + return "data_" + entityLower +} + +// createDirectories - Buat direktori sesuai struktur path +func createDirectories(pathInfo *PathInfo) (string, string, error) { + var handlerDir, modelDir string + + if pathInfo.DirPath != "" { + handlerDir = filepath.Join("internal", "handlers", pathInfo.DirPath) + modelDir = filepath.Join("internal", "models", pathInfo.DirPath) + } else { + handlerDir = filepath.Join("internal", "handlers") + modelDir = filepath.Join("internal", "models") + } + + // Create directories + for _, dir := range []string{handlerDir, modelDir} { + if err := os.MkdirAll(dir, 0755); err != nil { + return "", "", fmt.Errorf("failed to create directory %s: %v", dir, err) + } + } + + return handlerDir, modelDir, nil +} + +// setMethods - Set method flags berdasarkan input +func setMethods(data *HandlerData, methods []string) { + methodMap := map[string]*bool{ + "get": &data.HasGet, + "post": &data.HasPost, + "put": &data.HasPut, + "delete": &data.HasDelete, + "stats": &data.HasStats, + "dynamic": &data.HasDynamic, + "search": &data.HasSearch, + } + + for _, method := range methods { + if flag, exists := methodMap[strings.ToLower(method)]; exists { + *flag = true + } + } + + // Always 
add stats if we have get + if data.HasGet { + data.HasStats = true + } +} + +func main() { + // Validasi argument + if len(os.Args) < 2 { + fmt.Println("Usage: go run generate-handler.go [path/]entity [methods]") + fmt.Println("Examples:") + fmt.Println(" go run generate-handler.go product get post put delete") + fmt.Println(" go run generate-handler.go retribusi/tarif get post put delete dynamic search") + fmt.Println(" go run generate-handler.go product/category/subcategory/item get post") + fmt.Println("\nSupported methods: get, post, put, delete, stats, dynamic, search") + os.Exit(1) + } + + // Parse entity path + entityPath := strings.TrimSpace(os.Args[1]) + pathInfo, err := parseEntityPath(entityPath) + if err != nil { + fmt.Printf("โŒ Error parsing path: %v\n", err) + os.Exit(1) + } + + // Parse methods + var methods []string + if len(os.Args) > 2 { + methods = os.Args[2:] + } else { + // Default methods with advanced features + methods = []string{"get", "post", "put", "delete", "dynamic", "search"} + } + + // Validate methods + if err := validateMethods(methods); err != nil { + fmt.Printf("โŒ %v\n", err) + os.Exit(1) + } + + // Format names + entityName := strings.Title(pathInfo.EntityName) // PascalCase entity name + entityLower := strings.ToLower(pathInfo.EntityName) + entityPlural := entityLower + "s" + + // Generate table name + tableName := generateTableName(pathInfo) + + // Create HandlerData + data := HandlerData{ + Name: entityName, + NameLower: entityLower, + NamePlural: entityPlural, + Category: pathInfo.Category, + DirPath: pathInfo.DirPath, + ModuleName: "api-service", + TableName: tableName, + HasPagination: true, + HasFilter: true, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + } + + // Set methods + setMethods(&data, methods) + + // Create directories + handlerDir, modelDir, err := createDirectories(pathInfo) + if err != nil { + fmt.Printf("โŒ Error creating directories: %v\n", err) + os.Exit(1) + } + + // Generate files + 
generateHandlerFile(data, handlerDir) + generateModelFile(data, modelDir) + updateRoutesFile(data) + + // Success output + fmt.Printf("โœ… Successfully generated handler: %s\n", entityName) + if pathInfo.Category != "" { + fmt.Printf("๐Ÿ“ Category: %s\n", pathInfo.Category) + } + if pathInfo.DirPath != "" { + fmt.Printf("๐Ÿ“‚ Path: %s\n", pathInfo.DirPath) + } + fmt.Printf("๐Ÿ“„ Handler: %s\n", filepath.Join(handlerDir, entityLower+".go")) + fmt.Printf("๐Ÿ“„ Model: %s\n", filepath.Join(modelDir, entityLower+".go")) + fmt.Printf("๐Ÿ—„๏ธ Table: %s\n", tableName) + fmt.Printf("๐Ÿ› ๏ธ Methods: %s\n", strings.Join(methods, ", ")) +} + +// ================= HANDLER GENERATION ===================== +func generateHandlerFile(data HandlerData, handlerDir string) { + // var modelsImportPath string + // if data.Category != "" { + // modelsImportPath = data.ModuleName + "/internal/models/" + data.Category + // } else { + // modelsImportPath = data.ModuleName + "/internal/models" + // } + + // pakai strings.Builder biar lebih clean + var handlerContent strings.Builder + + // Header + handlerContent.WriteString("package handlers\n\n") + handlerContent.WriteString("import (\n") + handlerContent.WriteString(` "` + data.ModuleName + `/internal/config"` + "\n") + handlerContent.WriteString(` "` + data.ModuleName + `/internal/database"` + "\n") + handlerContent.WriteString(` models "` + data.ModuleName + `/internal/models"` + "\n") + if data.Category != "models" { + handlerContent.WriteString(` "` + data.ModuleName + `/internal/models/` + data.Category + `"` + "\n") + } + + // Conditional imports + if data.HasDynamic || data.HasSearch { + handlerContent.WriteString(` utils "` + data.ModuleName + `/internal/utils/filters"` + "\n") + } + + handlerContent.WriteString(` "` + data.ModuleName + `/internal/utils/validation"` + "\n") + handlerContent.WriteString(` "context"` + "\n") + handlerContent.WriteString(` "database/sql"` + "\n") + handlerContent.WriteString(` "fmt"` + "\n") + 
handlerContent.WriteString(` "log"` + "\n") + handlerContent.WriteString(` "net/http"` + "\n") + handlerContent.WriteString(` "strconv"` + "\n") + handlerContent.WriteString(` "strings"` + "\n") + handlerContent.WriteString(` "sync"` + "\n") + handlerContent.WriteString(` "time"` + "\n\n") + handlerContent.WriteString(` "github.com/gin-gonic/gin"` + "\n") + handlerContent.WriteString(` "github.com/go-playground/validator/v10"` + "\n") + handlerContent.WriteString(` "github.com/google/uuid"` + "\n") + handlerContent.WriteString(")\n\n") + + // Vars + handlerContent.WriteString("var (\n") + handlerContent.WriteString(" " + data.NameLower + "db database.Service\n") + handlerContent.WriteString(" " + data.NameLower + "once sync.Once\n") + handlerContent.WriteString(" " + data.NameLower + "validate *validator.Validate\n") + handlerContent.WriteString(")\n\n") + + // init func + handlerContent.WriteString("// Initialize the database connection and validator\n") + handlerContent.WriteString("func init() {\n") + handlerContent.WriteString(" " + data.NameLower + "once.Do(func() {\n") + handlerContent.WriteString(" " + data.NameLower + "db = database.New(config.LoadConfig())\n") + handlerContent.WriteString(" " + data.NameLower + "validate = validator.New()\n") + handlerContent.WriteString(" " + data.NameLower + "validate.RegisterValidation(\"" + data.NameLower + "_status\", validate" + data.Name + "Status)\n") + handlerContent.WriteString(" if " + data.NameLower + "db == nil {\n") + handlerContent.WriteString(" log.Fatal(\"Failed to initialize database connection\")\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString(" })\n") + handlerContent.WriteString("}\n\n") + + // Custom validation + handlerContent.WriteString("// Custom validation for " + data.NameLower + " status\n") + handlerContent.WriteString("func validate" + data.Name + "Status(fl validator.FieldLevel) bool {\n") + handlerContent.WriteString(" return 
models.IsValidStatus(fl.Field().String())\n") + handlerContent.WriteString("}\n\n") + + // Handler struct + handlerContent.WriteString("// " + data.Name + "Handler handles " + data.NameLower + " services\n") + handlerContent.WriteString("type " + data.Name + "Handler struct {\n") + handlerContent.WriteString(" db database.Service\n") + handlerContent.WriteString("}\n\n") + + // Constructor + handlerContent.WriteString("// New" + data.Name + "Handler creates a new " + data.Name + "Handler\n") + handlerContent.WriteString("func New" + data.Name + "Handler() *" + data.Name + "Handler {\n") + handlerContent.WriteString(" return &" + data.Name + "Handler{\n") + handlerContent.WriteString(" db: " + data.NameLower + "db,\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString("}\n") + + // Add optional methods + if data.HasGet { + handlerContent.WriteString(generateGetMethods(data)) + } + if data.HasDynamic { + handlerContent.WriteString(generateDynamicMethod(data)) + } + if data.HasSearch { + handlerContent.WriteString(generateSearchMethod(data)) + } + if data.HasPost { + handlerContent.WriteString(generateCreateMethod(data)) + } + if data.HasPut { + handlerContent.WriteString(generateUpdateMethod(data)) + } + if data.HasDelete { + handlerContent.WriteString(generateDeleteMethod(data)) + } + if data.HasStats { + handlerContent.WriteString(generateStatsMethod(data)) + } + + // Add helper methods + handlerContent.WriteString(generateHelperMethods(data)) + + // Write into file + writeFile(filepath.Join(handlerDir, data.NameLower+".go"), handlerContent.String()) +} + +func generateGetMethods(data HandlerData) string { + return ` + +// Get` + data.Name + ` godoc +// @Summary Get ` + data.NameLower + ` with pagination and optional aggregation +// @Description Returns a paginated list of ` + data.NamePlural + ` with optional summary statistics +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param limit query int false "Limit (max 100)" 
default(10) +// @Param offset query int false "Offset" default(0) +// @Param include_summary query bool false "Include aggregation summary" default(false) +// @Param status query string false "Filter by status" +// @Param search query string false "Search in multiple fields" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `GetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + ` [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `(c *gin.Context) { + // Parse pagination parameters + limit, offset, err := h.parsePaginationParams(c) + if err != nil { + h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest) + return + } + + // Parse filter parameters + filter := h.parseFilterParams(c) + includeAggregation := c.Query("include_summary") == "true" + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute concurrent operations + var ( + items []` + data.Category + `.` + data.Name + ` + total int + aggregateData *models.AggregateData + wg sync.WaitGroup + errChan = make(chan error, 3) + mu sync.Mutex + ) + + // Fetch total count + wg.Add(1) + go func() { + defer wg.Done() + if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil { + mu.Lock() + errChan <- fmt.Errorf("failed to get total count: %w", err) + mu.Unlock() + } + }() + + // Fetch main data + wg.Add(1) + go func() { + defer wg.Done() + result, err := h.fetch` + data.Name + `s(ctx, dbConn, filter, limit, offset) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to fetch data: %w", err) + } else { + items = result + 
} + mu.Unlock() + }() + + // Fetch aggregation data if requested + if includeAggregation { + wg.Add(1) + go func() { + defer wg.Done() + result, err := h.getAggregateData(ctx, dbConn, filter) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to get aggregate data: %w", err) + } else { + aggregateData = result + } + mu.Unlock() + }() + } + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(limit, offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: "Data ` + data.Category + ` berhasil diambil", + Data: items, + Meta: meta, + } + + if includeAggregation && aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) +} + +// Get` + data.Name + `ByID godoc +// @Summary Get ` + data.Name + ` by ID +// @Description Returns a single ` + data.NameLower + ` by ID +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `GetByIDResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `ByID(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + 
return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + item, err := h.get` + data.Name + `ByID(ctx, dbConn, id) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to get ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `GetByIDResponse{ + Message: "` + data.Category + ` details retrieved successfully", + Data: item, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateDynamicMethod(data HandlerData) string { + return ` + +// Get` + data.Name + `Dynamic godoc +// @Summary Get ` + data.NameLower + ` with dynamic filtering +// @Description Returns ` + data.NamePlural + ` with advanced dynamic filtering like Directus +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param fields query string false "Fields to select (e.g., fields=*.*)" +// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[name][_eq]=value)" +// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)" +// @Param limit query int false "Limit" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} ` + data.Category + `.` + data.Name + `GetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + `/dynamic [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `Dynamic(c *gin.Context) { + // Parse query parameters + parser := utils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) + if err != nil { + h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) + return + } + + // Get database connection + dbConn, err := 
h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query with dynamic filtering + items, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: "Data ` + data.Category + ` berhasil diambil", + Data: items, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateSearchMethod(data HandlerData) string { + return ` + +// Search` + data.Name + `Advanced provides advanced search capabilities +func (h *` + data.Name + `Handler) Search` + data.Name + `Advanced(c *gin.Context) { + // Parse complex search parameters + searchQuery := c.Query("q") + if searchQuery == "" { + h.respondError(c, "Search query is required", fmt.Errorf("empty search query"), http.StatusBadRequest) + return + } + + // Build dynamic query for search + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{ + { + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }, + { + Column: "name", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + }, + LogicOp: "AND", + }}, + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); 
err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute search + items, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: fmt.Sprintf("Search results for '%s'", searchQuery), + Data: items, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateCreateMethod(data HandlerData) string { + return ` + +// Create` + data.Name + ` godoc +// @Summary Create ` + data.NameLower + ` +// @Description Creates a new ` + data.NameLower + ` record +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param request body ` + data.Category + `.` + data.Name + `CreateRequest true "` + data.Name + ` creation request" +// @Success 201 {object} ` + data.Category + `.` + data.Name + `CreateResponse "` + data.Name + ` created successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + ` [post] +func (h *` + data.Name + `Handler) Create` + data.Name + `(c *gin.Context) { + var req ` + data.Category + `.` + data.Name + `CreateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Validate request + if err := ` + data.NameLower + `validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + 
return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Validate duplicate and daily submission + if err := h.validate` + data.Name + `Submission(ctx, dbConn, &req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + item, err := h.create` + data.Name + `(ctx, dbConn, &req) + if err != nil { + h.logAndRespondError(c, "Failed to create ` + data.NameLower + `", err, http.StatusInternalServerError) + return + } + + response := ` + data.Category + `.` + data.Name + `CreateResponse{ + Message: "` + data.Name + ` berhasil dibuat", + Data: item, + } + + c.JSON(http.StatusCreated, response) +}` +} + +func generateUpdateMethod(data HandlerData) string { + return ` + +// Update` + data.Name + ` godoc +// @Summary Update ` + data.NameLower + ` +// @Description Updates an existing ` + data.NameLower + ` record +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Param request body ` + data.Category + `.` + data.Name + `UpdateRequest true "` + data.Name + ` update request" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `UpdateResponse "` + data.Name + ` updated successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [put] +func (h *` + data.Name + `Handler) Update` + data.Name + `(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + var req 
` + data.Category + `.` + data.Name + `UpdateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Set ID from path parameter + req.ID = id + + // Validate request + if err := ` + data.NameLower + `validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + item, err := h.update` + data.Name + `(ctx, dbConn, &req) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to update ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `UpdateResponse{ + Message: "` + data.Name + ` berhasil diperbarui", + Data: item, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateDeleteMethod(data HandlerData) string { + return ` + +// Delete` + data.Name + ` godoc +// @Summary Delete ` + data.NameLower + ` +// @Description Soft deletes a ` + data.NameLower + ` by setting status to 'deleted' +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `DeleteResponse "` + data.Name + ` deleted successfully" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [delete] +func (h *` + data.Name + `Handler) Delete` + data.Name + `(c 
*gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + err = h.delete` + data.Name + `(ctx, dbConn, id) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to delete ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `DeleteResponse{ + Message: "` + data.Name + ` berhasil dihapus", + ID: id, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateStatsMethod(data HandlerData) string { + return ` + +// Get` + data.Name + `Stats godoc +// @Summary Get ` + data.NameLower + ` statistics +// @Description Returns comprehensive statistics about ` + data.NameLower + ` data +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param status query string false "Filter statistics by status" +// @Success 200 {object} models.AggregateData "Statistics data" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + `/stats [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `Stats(c *gin.Context) { + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + filter := h.parseFilterParams(c) + aggregateData, err := h.getAggregateData(ctx, dbConn, filter) + if err != nil { + h.logAndRespondError(c, 
"Failed to get statistics", err, http.StatusInternalServerError) + return + } + + c.JSON(http.StatusOK, gin.H{ + "message": "Statistik ` + data.NameLower + ` berhasil diambil", + "data": aggregateData, + }) +}` +} + +func generateHelperMethods(data HandlerData) string { + helperMethods := ` + +// Database operations +func (h *` + data.Name + `Handler) get` + data.Name + `ByID(ctx context.Context, dbConn *sql.DB, id string) (*` + data.Category + `.` + data.Name + `, error) { + query := "SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE id = $1 AND status != 'deleted'" + row := dbConn.QueryRowContext(ctx, query, id) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, err + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) create` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) (*` + data.Category + `.` + data.Name + `, error) { + id := uuid.New().String() + now := time.Now() + + query := "INSERT INTO ` + data.TableName + ` (id, status, date_created, date_updated, name) VALUES ($1, $2, $3, $4, $5) RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" + row := dbConn.QueryRowContext(ctx, query, id, req.Status, now, now, req.Name) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, fmt.Errorf("failed to create ` + data.NameLower + `: %w", err) + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) update` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `UpdateRequest) (*` + 
data.Category + `.` + data.Name + `, error) { + now := time.Now() + + query := "UPDATE ` + data.TableName + ` SET status = $2, date_updated = $3, name = $4 WHERE id = $1 AND status != 'deleted' RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" + row := dbConn.QueryRowContext(ctx, query, req.ID, req.Status, now, req.Name) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, fmt.Errorf("failed to update ` + data.NameLower + `: %w", err) + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) delete` + data.Name + `(ctx context.Context, dbConn *sql.DB, id string) error { + now := time.Now() + query := "UPDATE ` + data.TableName + ` SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'" + + result, err := dbConn.ExecContext(ctx, query, id, now) + if err != nil { + return fmt.Errorf("failed to delete ` + data.NameLower + `: %w", err) + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("failed to get affected rows: %w", err) + } + + if rowsAffected == 0 { + return sql.ErrNoRows + } + + return nil +} + +func (h *` + data.Name + `Handler) fetch` + data.Name + `s(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter, limit, offset int) ([]` + data.Category + `.` + data.Name + `, error) { + whereClause, args := h.buildWhereClause(filter) + query := fmt.Sprintf("SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE %s ORDER BY date_created DESC NULLS LAST LIMIT $%d OFFSET $%d", whereClause, len(args)+1, len(args)+2) + args = append(args, limit, offset) + + rows, err := dbConn.QueryContext(ctx, query, args...) 
+	if err != nil {
+		return nil, fmt.Errorf("fetch ` + data.NamePlural + ` query failed: %w", err)
+	}
+	defer rows.Close()
+
+	items := make([]` + data.Category + `.` + data.Name + `, 0, limit)
+	for rows.Next() {
+		item, err := h.scan` + data.Name + `(rows)
+		if err != nil {
+			return nil, fmt.Errorf("scan ` + data.Name + ` failed: %w", err)
+		}
+		items = append(items, item)
+	}
+
+	if err := rows.Err(); err != nil {
+		return nil, fmt.Errorf("rows iteration error: %w", err)
+	}
+
+	log.Printf("Successfully fetched %d ` + data.NamePlural + ` with filters applied", len(items))
+	return items, nil
+}`
+
+	// Add dynamic fetch method if needed
+	if data.HasDynamic {
+		helperMethods += `
+
+// fetch` + data.Name + `sDynamic executes dynamic query
+func (h *` + data.Name + `Handler) fetch` + data.Name + `sDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]` + data.Category + `.` + data.Name + `, int, error) {
+	// Setup query builder
+	builder := utils.NewQueryBuilder("` + data.TableName + `").
+		SetAllowedColumns([]string{
+			"id", "status", "sort", "user_created", "date_created",
+			"user_updated", "date_updated", "name",
+		})
+
+	// Add default filter to exclude deleted records
+	query.Filters = append([]utils.FilterGroup{{
+		Filters: []utils.DynamicFilter{{
+			Column:   "status",
+			Operator: utils.OpNotEqual,
+			Value:    "deleted",
+		}},
+		LogicOp: "AND",
+	}}, query.Filters...)
+ + // Execute concurrent queries + var ( + items [] ` + data.Category + `.` + data.Name + ` + total int + wg sync.WaitGroup + errChan = make(chan error, 2) + mu sync.Mutex + ) + + // Fetch total count + wg.Add(1) + go func() { + defer wg.Done() + countQuery := query + countQuery.Limit = 0 + countQuery.Offset = 0 + countSQL, countArgs, err := builder.BuildCountQuery(countQuery) + if err != nil { + errChan <- fmt.Errorf("failed to build count query: %w", err) + return + } + if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { + errChan <- fmt.Errorf("failed to get total count: %w", err) + return + } + }() + + // Fetch main data + wg.Add(1) + go func() { + defer wg.Done() + mainSQL, mainArgs, err := builder.BuildQuery(query) + if err != nil { + errChan <- fmt.Errorf("failed to build main query: %w", err) + return + } + + rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) + if err != nil { + errChan <- fmt.Errorf("failed to execute main query: %w", err) + return + } + defer rows.Close() + + var results []` + data.Category + `.` + data.Name + ` + for rows.Next() { + item, err := h.scan` + data.Name + `(rows) + if err != nil { + errChan <- fmt.Errorf("failed to scan ` + data.NameLower + `: %w", err) + return + } + results = append(results, item) + } + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("rows iteration error: %w", err) + return + } + + mu.Lock() + items = results + mu.Unlock() + }() + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, 0, err + } + } + + return items, total, nil +} +` + } + + helperMethods += ` +// Optimized scanning function +func (h *` + data.Name + `Handler) scan` + data.Name + `(rows *sql.Rows) (` + data.Category + `.` + data.Name + `, error) { + var item ` + data.Category + `.` + data.Name + ` + + // Scan into individual fields to handle nullable types properly + err := rows.Scan( + 
&item.ID, + &item.Status, + &item.Sort.Int32, &item.Sort.Valid, // models.NullableInt32 + &item.UserCreated.String, &item.UserCreated.Valid, // sql.NullString + &item.DateCreated.Time, &item.DateCreated.Valid, // sql.NullTime + &item.UserUpdated.String, &item.UserUpdated.Valid, // sql.NullString + &item.DateUpdated.Time, &item.DateUpdated.Valid, // sql.NullTime + &item.Name.String, &item.Name.Valid, // sql.NullString + ) + + return item, err +} + +func (h *` + data.Name + `Handler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter, total *int) error { + whereClause, args := h.buildWhereClause(filter) + countQuery := fmt.Sprintf("SELECT COUNT(*) FROM ` + data.TableName + ` WHERE %s", whereClause) + if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil { + return fmt.Errorf("total count query failed: %w", err) + } + return nil +} + +// Get comprehensive aggregate data dengan filter support +func (h *` + data.Name + `Handler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter) (*models.AggregateData, error) { + aggregate := &models.AggregateData{ + ByStatus: make(map[string]int), + } + + // Build where clause untuk filter + whereClause, args := h.buildWhereClause(filter) + + // Use concurrent execution untuk performance + var wg sync.WaitGroup + var mu sync.Mutex + errChan := make(chan error, 4) + + // 1. Count by status + wg.Add(1) + go func() { + defer wg.Done() + statusQuery := fmt.Sprintf("SELECT status, COUNT(*) FROM ` + data.TableName + ` WHERE %s GROUP BY status ORDER BY status", whereClause) + + rows, err := dbConn.QueryContext(ctx, statusQuery, args...) 
+ if err != nil { + errChan <- fmt.Errorf("status query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var status string + var count int + if err := rows.Scan(&status, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("status scan failed: %w", err) + return + } + aggregate.ByStatus[status] = count + switch status { + case "active": + aggregate.TotalActive = count + case "draft": + aggregate.TotalDraft = count + case "inactive": + aggregate.TotalInactive = count + } + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("status iteration error: %w", err) + } + }() + + // 2. Get last updated time dan today statistics + wg.Add(1) + go func() { + defer wg.Done() + + // Last updated + lastUpdatedQuery := fmt.Sprintf("SELECT MAX(date_updated) FROM ` + data.TableName + ` WHERE %s AND date_updated IS NOT NULL", whereClause) + var lastUpdated sql.NullTime + if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { + errChan <- fmt.Errorf("last updated query failed: %w", err) + return + } + + // Today statistics + today := time.Now().Format("2006-01-02") + todayStatsQuery := fmt.Sprintf(` + "`" + ` + SELECT + SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, + SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today + FROM ` + data.TableName + ` + WHERE %s` + "`" + `, len(args)+1, len(args)+1, len(args)+1, whereClause) + + todayArgs := append(args, today) + var createdToday, updatedToday int + if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { + errChan <- fmt.Errorf("today stats query failed: %w", err) + return + } + + mu.Lock() + if lastUpdated.Valid { + aggregate.LastUpdated = &lastUpdated.Time + } + aggregate.CreatedToday = createdToday + aggregate.UpdatedToday = updatedToday + mu.Unlock() + }() + + // Wait for all 
goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, err + } + } + + return aggregate, nil +} + +// Enhanced error handling +func (h *` + data.Name + `Handler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + log.Printf("[ERROR] %s: %v", message, err) + h.respondError(c, message, err, statusCode) +} + +func (h *` + data.Name + `Handler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + + c.JSON(statusCode, models.ErrorResponse{ + Error: errorMessage, + Code: statusCode, + Message: err.Error(), + Timestamp: time.Now(), + }) +} + +// Parse pagination parameters dengan validation yang lebih ketat +func (h *` + data.Name + `Handler) parsePaginationParams(c *gin.Context) (int, int, error) { + limit := 10 // Default limit + offset := 0 // Default offset + + if limitStr := c.Query("limit"); limitStr != "" { + parsedLimit, err := strconv.Atoi(limitStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) + } + if parsedLimit <= 0 { + return 0, 0, fmt.Errorf("limit must be greater than 0") + } + if parsedLimit > 100 { + return 0, 0, fmt.Errorf("limit cannot exceed 100") + } + limit = parsedLimit + } + + if offsetStr := c.Query("offset"); offsetStr != "" { + parsedOffset, err := strconv.Atoi(offsetStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) + } + if parsedOffset < 0 { + return 0, 0, fmt.Errorf("offset cannot be negative") + } + offset = parsedOffset + } + + log.Printf("Pagination - Limit: %d, Offset: %d", limit, offset) + return limit, offset, nil +} + +func (h *` + data.Name + `Handler) parseFilterParams(c *gin.Context) ` + data.Category + `.` + data.Name + `Filter { + filter := ` + data.Category + `.` + data.Name + `Filter{} + + if status := c.Query("status"); 
status != "" { + if models.IsValidStatus(status) { + filter.Status = &status + } + } + + if search := c.Query("search"); search != "" { + filter.Search = &search + } + + // Parse date filters + if dateFromStr := c.Query("date_from"); dateFromStr != "" { + if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { + filter.DateFrom = &dateFrom + } + } + + if dateToStr := c.Query("date_to"); dateToStr != "" { + if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { + filter.DateTo = &dateTo + } + } + + return filter +} + +// Build WHERE clause dengan filter parameters +func (h *` + data.Name + `Handler) buildWhereClause(filter ` + data.Category + `.` + data.Name + `Filter) (string, []interface{}) { + conditions := []string{"status != 'deleted'"} + args := []interface{}{} + paramCount := 1 + + if filter.Status != nil { + conditions = append(conditions, fmt.Sprintf("status = $%d", paramCount)) + args = append(args, *filter.Status) + paramCount++ + } + + if filter.Search != nil { + searchCondition := fmt.Sprintf("name ILIKE $%d", paramCount) + conditions = append(conditions, searchCondition) + searchTerm := "%" + *filter.Search + "%" + args = append(args, searchTerm) + paramCount++ + } + + if filter.DateFrom != nil { + conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) + args = append(args, *filter.DateFrom) + paramCount++ + } + + if filter.DateTo != nil { + conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) + args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) + paramCount++ + } + + return strings.Join(conditions, " AND "), args +} + +func (h *` + data.Name + `Handler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages := 0 + currentPage := 1 + if limit > 0 { + totalPages = (total + limit - 1) / limit // Ceiling division + currentPage = (offset / limit) + 1 + } + + return models.MetaResponse{ + Limit: limit, + Offset: offset, + Total: total, + 
TotalPages: totalPages, + CurrentPage: currentPage, + HasNext: offset+limit < total, + HasPrev: offset > 0, + } +} + +// validate` + data.Name + `Submission performs validation for duplicate entries and daily submission limits +func (h *` + data.Name + `Handler) validate` + data.Name + `Submission(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) error { + // Import the validation utility + validator := validation.NewDuplicateValidator(dbConn) + + // Use default configuration + config := validation.ValidationConfig{ + TableName: "` + data.TableName + `", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + } + + // Validate duplicate entries with active status for today + err := validator.ValidateDuplicate(ctx, config, "dummy_id") + if err != nil { + return fmt.Errorf("validation failed: %w", err) + } + + // Validate once per day submission + err = validator.ValidateOncePerDay(ctx, "` + data.TableName + `", "id", "date_created", "daily_limit") + if err != nil { + return fmt.Errorf("daily submission limit exceeded: %w", err) + } + + return nil +} + +// Example usage of the validation utility with custom configuration +func (h *` + data.Name + `Handler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) error { + // Create validator instance + validator := validation.NewDuplicateValidator(dbConn) + + // Use custom configuration + config := validation.ValidationConfig{ + TableName: "` + data.TableName + `", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + AdditionalFields: map[string]interface{}{ + "name": req.Name, + }, + } + + // Validate with custom fields + fields := map[string]interface{}{ + "name": *req.Name, + } + + err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) + if err != nil { + 
			return fmt.Errorf("custom validation failed: %w", err)
	}

	return nil
}

// GetLastSubmissionTime example
func (h *` + data.Name + `Handler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) {
	validator := validation.NewDuplicateValidator(dbConn)
	return validator.GetLastSubmissionTime(ctx, "` + data.TableName + `", "id", "date_created", identifier)
}`

	return helperMethods
}

// Keep existing functions for model generation and routes...
// (The remaining functions stay the same as in the original file)

// ================= MODEL GENERATION =====================

// generateModelFile writes <modelDir>/<NameLower>.go for the target entity:
// the model struct with nullable columns, a custom MarshalJSON that omits
// NULL values, a GetName helper, request/response structs gated by the
// HasGet/HasPost/HasPut/HasDelete flags, and a query-filter struct.
// The body is assembled by string concatenation; everything inside the
// backtick literals is emitted verbatim into the generated file.
func generateModelFile(data HandlerData, modelDir string) {
	// Decide the import block for the generated file. A model living in the
	// shared "models" package needs only stdlib imports; any other category
	// additionally imports the shared models package and qualifies the
	// nullable helper types with the "models." prefix.
	var importBlock, nullablePrefix string

	if data.Category == "models" {
		importBlock = `import (
	"database/sql"
	"encoding/json"
	"time"
)
`
	} else {
		nullablePrefix = "models."
		importBlock = `import (
	"` + data.ModuleName + `/internal/models"
	"database/sql"
	"encoding/json"
	"time"
)
`
	}

	// Base model: struct definition plus custom JSON marshaling so NULL
	// columns never appear in API responses.
	modelContent := `package ` + data.Category + `

` + importBlock + `

// ` + data.Name + ` represents the data structure for the ` + data.NameLower + ` table
// with proper null handling and optimized JSON marshaling
type ` + data.Name + ` struct {
	ID          string ` + "`json:\"id\" db:\"id\"`" + `
	Status      string ` + "`json:\"status\" db:\"status\"`" + `
	Sort        ` + nullablePrefix + "NullableInt32 `json:\"sort,omitempty\" db:\"sort\"`" + `
	UserCreated sql.NullString ` + "`json:\"user_created,omitempty\" db:\"user_created\"`" + `
	DateCreated sql.NullTime ` + "`json:\"date_created,omitempty\" db:\"date_created\"`" + `
	UserUpdated sql.NullString ` + "`json:\"user_updated,omitempty\" db:\"user_updated\"`" + `
	DateUpdated sql.NullTime ` + "`json:\"date_updated,omitempty\" db:\"date_updated\"`" + `
	Name        sql.NullString ` + "`json:\"name,omitempty\" db:\"name\"`" + `
}

// Custom JSON marshaling untuk ` + data.Name + ` agar NULL values tidak muncul di response
func (r ` + data.Name + `) MarshalJSON() ([]byte, error) {
	type Alias ` + data.Name + `
	aux := &struct {
		Sort        *int ` + "`json:\"sort,omitempty\"`" + `
		UserCreated *string ` + "`json:\"user_created,omitempty\"`" + `
		DateCreated *time.Time ` + "`json:\"date_created,omitempty\"`" + `
		UserUpdated *string ` + "`json:\"user_updated,omitempty\"`" + `
		DateUpdated *time.Time ` + "`json:\"date_updated,omitempty\"`" + `
		Name        *string ` + "`json:\"name,omitempty\"`" + `
		*Alias
	}{
		Alias: (*Alias)(&r),
	}

	if r.Sort.Valid {
		sort := int(r.Sort.Int32)
		aux.Sort = &sort
	}
	if r.UserCreated.Valid {
		aux.UserCreated = &r.UserCreated.String
	}
	if r.DateCreated.Valid {
		aux.DateCreated = &r.DateCreated.Time
	}
	if r.UserUpdated.Valid {
		aux.UserUpdated = &r.UserUpdated.String
	}
	if r.DateUpdated.Valid {
		aux.DateUpdated = &r.DateUpdated.Time
	}
	if r.Name.Valid {
		aux.Name = &r.Name.String
	}
	return json.Marshal(aux)
}

// Helper methods untuk mendapatkan nilai yang aman
func (r *` + data.Name + `) GetName() string {
	if r.Name.Valid {
		return r.Name.String
	}
	return ""
}
`

	// Add request/response structs based on enabled methods
	if data.HasGet {
		modelContent += `

// Response struct untuk GET by ID
type ` + data.Name + `GetByIDResponse struct {
	Message string ` + "`json:\"message\"`" + `
	Data    *` + data.Name + ` ` + "`json:\"data\"`" + `
}

// Enhanced GET response dengan pagination dan aggregation
type ` + data.Name + `GetResponse struct {
	Message string ` + "`json:\"message\"`" + `
	Data    []` + data.Name + ` ` + "`json:\"data\"`" + `
	Meta    ` + nullablePrefix + "MetaResponse `json:\"meta\"`" + `
	Summary *` + nullablePrefix + "AggregateData `json:\"summary,omitempty\"`" + `
}
`
	}
	if data.HasPost {
		modelContent += `

// Request struct untuk create
type ` + data.Name + `CreateRequest struct {
	Status string ` + "`json:\"status\" validate:\"required,oneof=draft active inactive\"`" + `
	Name   *string ` + "`json:\"name,omitempty\" validate:\"omitempty,min=1,max=255\"`" + `
}

// Response struct untuk create
type ` + data.Name + `CreateResponse struct {
	Message string ` + "`json:\"message\"`" + `
	Data    *` + data.Name + ` ` + "`json:\"data\"`" + `
}
`
	}
	if data.HasPut {
		modelContent += `

// Update request
type ` + data.Name + `UpdateRequest struct {
	ID     string ` + "`json:\"-\" validate:\"required,uuid4\"`" + `
	Status string ` + "`json:\"status\" validate:\"required,oneof=draft active inactive\"`" + `
	Name   *string ` + "`json:\"name,omitempty\" validate:\"omitempty,min=1,max=255\"`" + `
}

// Response struct untuk update
type ` + data.Name + `UpdateResponse struct {
	Message string ` + "`json:\"message\"`" + `
	Data    *` + data.Name + ` ` + "`json:\"data\"`" + `
}
`
	}
	if data.HasDelete {
		modelContent += `

// Response struct untuk delete
type ` + data.Name + `DeleteResponse struct {
	Message string ` + "`json:\"message\"`" + `
	ID      string ` + "`json:\"id\"`" + `
}
`
	}
	// Add filter struct (always emitted, regardless of enabled methods)
	modelContent += `

// Filter struct untuk query parameters
type ` + data.Name + `Filter struct {
	Status   *string ` + "`json:\"status,omitempty\" form:\"status\"`" + `
	Search   *string ` + "`json:\"search,omitempty\" form:\"search\"`" + `
	DateFrom *time.Time ` + "`json:\"date_from,omitempty\" form:\"date_from\"`" + `
	DateTo   *time.Time ` + "`json:\"date_to,omitempty\" form:\"date_to\"`" + `
}
`
	writeFile(filepath.Join(modelDir, data.NameLower+".go"), modelContent)
}

// ================= ROUTES GENERATION =====================

// updateRoutesFile patches internal/routes/v1/routes.go in place: it adds an
// aliased import for the new handler package and inserts the generated route
// block. If routes.go cannot be read it prints the route block for manual
// pasting instead.
func updateRoutesFile(data HandlerData) {
	routesFile := "internal/routes/v1/routes.go"
	content, err := os.ReadFile(routesFile)
	if err != nil {
		// Fall back to printing the snippet so the user can paste it in.
		fmt.Printf("โš ๏ธ Could not read routes.go: %v\n", err)
		fmt.Printf("๐Ÿ“ Please manually add these routes to your routes.go file:\n")
		printRoutesSample(data)
		return
	}

	routesContent := string(content)

	// Build import path and alias for the handler package. Non-"models"
	// categories get a category-qualified path and alias.
	var importPath, importAlias string
	if data.Category != "models" {
		importPath = fmt.Sprintf("%s/internal/handlers/"+data.Category, data.ModuleName)
		importAlias = data.Category + data.Name + "Handlers"
	} else {
		importPath = fmt.Sprintf("%s/internal/handlers", data.ModuleName)
		importAlias = data.NameLower + "Handlers"
	}

	// Add import (idempotent: skipped when the aliased import already exists)
	importPattern := fmt.Sprintf("%s \"%s\"", importAlias, importPath)
	if !strings.Contains(routesContent, importPattern) {
		importToAdd := fmt.Sprintf("\t%s \"%s\"", importAlias, importPath)
		if strings.Contains(routesContent, "import (") {
			routesContent = strings.Replace(routesContent, "import (",
				"import (\n"+importToAdd, 1)
		}
		// NOTE(review): if routes.go has no parenthesized "import (" block
		// (e.g. a single-line import), the alias is silently never added and
		// the inserted routes will not compile — confirm this is intended.
	}

	// Build new routes in protected group format
	newRoutes := generateProtectedRouteBlock(data)

	// Insert above protected routes marker
	insertMarker := "// ============= PUBLISHED ROUTES ==============================================="
	if strings.Contains(routesContent, insertMarker) {
		// Only insert when this handler's constructor is not yet referenced,
		// so re-running the generator does not duplicate routes.
		if !strings.Contains(routesContent, fmt.Sprintf("New%sHandler", data.Name)) {
			// Insert before the marker
			routesContent = strings.Replace(routesContent, insertMarker,
				newRoutes+"\n\t"+insertMarker, 1)
		} else {
			fmt.Printf("โœ… Routes for %s already exist, skipping...\n", data.Name)
			return
		}
	} else {
		// Fallback: insert at end of setupV1Routes function
		setupFuncEnd := "\treturn r"
		if strings.Contains(routesContent, setupFuncEnd) {
			routesContent = strings.Replace(routesContent, setupFuncEnd,
				newRoutes+"\n\n\t"+setupFuncEnd, 1)
		}
	}

	if err := os.WriteFile(routesFile, []byte(routesContent), 0644); err != nil {
		fmt.Printf("Error writing routes.go: %v\n", err)
		return
	}

	fmt.Printf("โœ… Updated routes.go with %s endpoints\n", data.Name)
}

// generateProtectedRouteBlock renders the Gin route-registration snippet for
// the entity: handler construction, a v1 route group, and one registration
// line per enabled method flag (HasDynamic/HasSearch/HasPost/HasPut/
// HasDelete/HasStats). The returned string is spliced into routes.go by
// updateRoutesFile or printed by printRoutesSample.
func generateProtectedRouteBlock(data HandlerData) string {
	// fmt.Printf("๐Ÿ“ Group Part: %s\n", groupPath)
	var sb strings.Builder
	var importPath, groupPath string
	if data.Category != "models" {
		importPath = data.Category + data.Name
		groupPath = strings.ToLower(data.Category) + "/" + data.NameLower
	} else {
		importPath = data.NameLower
		groupPath = data.NameLower
	}
	// Header comment plus handler and route-group declarations
	sb.WriteString("// ")
	sb.WriteString(data.Name)
	sb.WriteString(" endpoints\n")
	sb.WriteString(" ")
	sb.WriteString(importPath)
	sb.WriteString("Handler := ")
	sb.WriteString(importPath)
	sb.WriteString("Handlers.New")
	sb.WriteString(data.Name)
	sb.WriteString("Handler()\n ")
	sb.WriteString(importPath)

	sb.WriteString("Group := v1.Group(\"/")
	sb.WriteString(groupPath)
	sb.WriteString("\")\n {\n ")
	sb.WriteString(importPath)
	sb.WriteString("Group.GET(\"\", ")
	sb.WriteString(importPath)
	sb.WriteString("Handler.Get")
	sb.WriteString(data.Name)
	sb.WriteString(")\n")

	// Static routes (/dynamic, /search) are emitted before the /:id param
	// route; /stats is emitted after it.
	// NOTE(review): confirm the target router accepts a static sibling
	// (/stats) registered after the /:id param route — some routers reject
	// this combination at registration time.
	if data.HasDynamic {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/dynamic\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Get")
		sb.WriteString(data.Name)
		sb.WriteString("Dynamic) // Route baru\n")
	}
	if data.HasSearch {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/search\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Search")
		sb.WriteString(data.Name)
		sb.WriteString("Advanced) // Route pencarian\n")
	}
	sb.WriteString(" ")
	sb.WriteString(importPath)
	sb.WriteString("Group.GET(\"/:id\", ")
	sb.WriteString(importPath)
	sb.WriteString("Handler.Get")
	sb.WriteString(data.Name)
	sb.WriteString("ByID)\n")

	if data.HasPost {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.POST(\"\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Create")
		sb.WriteString(data.Name)
		sb.WriteString(")\n")
	}
	if data.HasPut {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.PUT(\"/:id\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Update")
		sb.WriteString(data.Name)
		sb.WriteString(")\n")
	}
	if data.HasDelete {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.DELETE(\"/:id\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Delete")
		sb.WriteString(data.Name)
		sb.WriteString(")\n")
	}
	if data.HasStats {
		sb.WriteString(" ")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/stats\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Get")
		sb.WriteString(data.Name)
		sb.WriteString("Stats)\n")
	}
	sb.WriteString(" }\n")
	return sb.String()
}

// printRoutesSample prints the generated route block to stdout, used as the
// manual-paste fallback when routes.go cannot be updated automatically.
func printRoutesSample(data HandlerData) {
	fmt.Print(generateProtectedRouteBlock(data))
	fmt.Println()
}

// ================= UTILITY FUNCTIONS =====================

// writeFile writes content to filename with 0644 permissions and logs the
// outcome to stdout; errors are reported but not propagated to the caller.
func writeFile(filename, content string) {
	if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
		fmt.Printf("โŒ Error creating file %s: %v\n", filename, err)
		return
	}
	fmt.Printf("โœ… Generated: %s\n", filename)
}