commit 5460259426fff2c9808dec233a27ceb041fbecbd Author: Meninjar Mulyono Date: Wed Sep 24 20:47:09 2025 +0700 first commit diff --git a/.air.toml b/.air.toml new file mode 100644 index 0000000..f127ea0 --- /dev/null +++ b/.air.toml @@ -0,0 +1,46 @@ +root = "." +testdata_dir = "testdata" +tmp_dir = "tmp" + +[build] + args_bin = [] + bin = ".\\main.exe" + cmd = "make build" + delay = 1000 + exclude_dir = ["assets", "tmp", "vendor", "testdata", "node_modules"] + exclude_file = [] + exclude_regex = ["_test.go"] + exclude_unchanged = false + follow_symlink = false + full_bin = "" + include_dir = [] + include_ext = ["go", "tpl", "tmpl", "html"] + include_file = [] + kill_delay = "0s" + log = "build-errors.log" + poll = false + poll_interval = 0 + post_cmd = [] + pre_cmd = [] + rerun = false + rerun_delay = 500 + send_interrupt = false + stop_on_error = false + +[color] + app = "" + build = "yellow" + main = "magenta" + runner = "green" + watcher = "cyan" + +[log] + main_only = false + time = false + +[misc] + clean_on_exit = false + +[screen] + clear_on_rebuild = false + keep_scroll = true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5cae497 --- /dev/null +++ b/.gitignore @@ -0,0 +1,34 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with "go test -c" +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +tmp/ + +# IDE specific files +.vscode +.idea + +# .env file +.env + +# Project build +main +*templ.go + +# OS X generated file +.DS_Store + diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..e001bf5 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,42 @@ +version: 2 +before: + hooks: + - go mod tidy + +env: + - PACKAGE_PATH=github.com///cmd + +builds: +- binary: "{{ .ProjectName }}" + main: ./cmd/api + goos: + - darwin 
+ - linux + - windows + goarch: + - amd64 + - arm64 + env: + - CGO_ENABLED=0 + ldflags: + - -s -w -X {{.Env.PACKAGE_PATH}}={{.Version}} +release: + prerelease: auto + +universal_binaries: +- replace: true + +archives: + - name_template: > + {{- .ProjectName }}_{{- .Version }}_{{- title .Os }}_{{- if eq .Arch "amd64" }}x86_64{{- else if eq .Arch "386" }}i386{{- else }}{{ .Arch }}{{ end }}{{- if .Arm }}v{{ .Arm }}{{ end -}} + format_overrides: + - goos: windows + format: zip + builds_info: + group: root + owner: root + files: + - README.md + +checksum: + name_template: 'checksums.txt' diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..4b4c9ce --- /dev/null +++ b/Dockerfile @@ -0,0 +1,19 @@ +FROM golang:1.24.4-alpine AS build + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +RUN go build -o main cmd/api/main.go + +FROM alpine:3.20.1 AS prod +WORKDIR /app +COPY --from=build /app/main /app/main +COPY --from=build /app/.env /app/.env +EXPOSE 8080 +CMD ["./main"] + + diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0c2392a --- /dev/null +++ b/Makefile @@ -0,0 +1,49 @@ +# Simple Makefile for a Go project + +# Build the application +all: build test + +build: + @echo "Building..." + + + @go build -o main.exe cmd/api/main.go + +# Run the application +run: + @go run cmd/api/main.go +# Create DB container +docker-run: + @docker compose up --build + +# Shutdown DB container +docker-down: + @docker compose down + +# Test the application +test: + @echo "Testing..." + @go test ./... -v +# Integrations Tests for the application +itest: + @echo "Running integration tests..." + @go test ./internal/database -v + +# Clean the binary +clean: + @echo "Cleaning..." 
+ @rm -f main + +# Live Reload +watch: + @powershell -ExecutionPolicy Bypass -Command "if (Get-Command air -ErrorAction SilentlyContinue) { \ + air; \ + Write-Output 'Watching...'; \ + } else { \ + Write-Output 'Installing air...'; \ + go install github.com/air-verse/air@latest; \ + air; \ + Write-Output 'Watching...'; \ + }" + +.PHONY: all build run test clean watch docker-run docker-down itest diff --git a/README.md b/README.md new file mode 100644 index 0000000..5d437a1 --- /dev/null +++ b/README.md @@ -0,0 +1,517 @@ + +# ๐Ÿš€ BPJS & Retribusi Management API + +> **Sistem manajemen data BPJS dan retribusi berbasis API dengan arsitektur modern untuk integrasi layanan kesehatan dan manajemen tarif** + +## ๐Ÿ“‘ Daftar Isi + +- [โœจ Fitur Utama](#-fitur-utama) +- [๐Ÿ—๏ธ Arsitektur](#%EF%B8%8F-arsitektur) +- [โšก Quick Start](#-quick-start) +- [๐Ÿ” Autentikasi](#-autentikasi) +- [๐Ÿ“Š API Endpoints](#-api-endpoints) +- [๐Ÿ› ๏ธ Development](#%EF%B8%8F-development) +- [๐Ÿš€ Deployment](#-deployment) +- [๐Ÿ“š Dokumentasi](#-dokumentasi) + +*** + +## โœจ Fitur Utama + +### Core Features + +- **๐Ÿฅ BPJS Integration** - Integrasi dengan layanan BPJS Kesehatan +- **๐Ÿ’ฐ Retribusi Management** - Manajemen tarif dan retribusi dinas +- **๐Ÿ”’ JWT Authentication** - Autentikasi dengan Keycloak integration +- **๐Ÿ” Dynamic Filtering** - Filter dan pencarian data retribusi +- **๐Ÿ“Š Health Monitoring** - Monitoring kesehatan sistem +- **๐Ÿ“– API Documentation** - Dokumentasi Swagger yang interaktif + +### Developer Experience + +- **๐Ÿ”ฅ Hot Reload** - Development dengan auto-restart +- **๐Ÿณ Docker Ready** - Deployment dengan container +- **โšก Code Generator** - Generator handler otomatis +- **๐Ÿ—„๏ธ Multi Database** - Support PostgreSQL, MySQL, MongoDB +- **๐Ÿ“ˆ Real-time Updates** - WebSocket notifications + +*** + +## ๐Ÿ—๏ธ Arsitektur + +### Clean Architecture Layers + +``` 
+โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Presentation Layer โ”‚ โ† handlers/, routes/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Application Layer โ”‚ โ† middleware/, validators/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Domain Layer โ”‚ โ† models/, interfaces/ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ Infrastructure Layer โ”‚ โ† database/, external APIs +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + + +### Struktur Project + +``` +bpjs-retribusi-api/ +โ”œโ”€โ”€ ๐Ÿ“ cmd/ +โ”‚ โ””โ”€โ”€ api/main.go # ๐Ÿšช Entry point aplikasi +โ”œโ”€โ”€ ๐Ÿ“ internal/ # ๐Ÿ  Core business logic +โ”‚ โ”œโ”€โ”€ handlers/ # ๐ŸŽฎ HTTP controllers +โ”‚ โ”‚ โ”œโ”€โ”€ auth/ # ๐Ÿ” Authentication handlers +โ”‚ โ”‚ โ”œโ”€โ”€ peserta/ # ๐Ÿฅ BPJS participant handlers +โ”‚ โ”‚ โ”œโ”€โ”€ retribusi/ # ๐Ÿ’ฐ Retribusi handlers +โ”‚ โ”‚ โ””โ”€โ”€ healthcheck/ # ๐Ÿ’š Health check handlers +โ”‚ โ”œโ”€โ”€ middleware/ # ๐Ÿ›ก๏ธ Auth & validation middleware +โ”‚ โ”œโ”€โ”€ models/ # ๐Ÿ“Š Data structures +โ”‚ โ”œโ”€โ”€ routes/ # ๐Ÿ›ฃ๏ธ API routing +โ”‚ โ”œโ”€โ”€ services/ # ๐Ÿ’ผ Business logic services +โ”‚ โ””โ”€โ”€ database/ # ๐Ÿ’พ Database connections +โ”œโ”€โ”€ ๐Ÿ“ tools/ # ๐Ÿ”ง Development tools +โ”‚ โ”œโ”€โ”€ general/ # ๐ŸŽฏ General generators +โ”‚ โ””โ”€โ”€ bpjs/ # ๐Ÿฅ BPJS specific tools +โ”œโ”€โ”€ ๐Ÿ“ docs/ # ๐Ÿ“š Documentation +โ””โ”€โ”€ ๐Ÿ“ scripts/ # ๐Ÿ“œ Automation scripts +``` + + +*** + +## โšก Quick Start + +### 1๏ธโƒฃ Setup Environment (2 menit) + +```bash +# Clone repository +git clone +cd bpjs-retribusi-api + +# Setup environment +cp example.env .env +``` + +### 2๏ธโƒฃ Pilih 
Method Setup + +**๐Ÿณ Docker (Recommended)** + +```bash +make docker-run +``` + +**๐Ÿ”ง Manual Setup** + +```bash +# Install dependencies +go mod download + +# Start server +go run cmd/api/main.go +``` + +### 3๏ธโƒฃ Verify Installation + +| Service | URL | Status | +| :-- | :-- | :-- | +| **API** | http://localhost:8080/api/v1 | โœ… | +| **Swagger** | http://localhost:8080/swagger/index.html | ๐Ÿ“– | +| **Health Check** | http://localhost:8080/api/sistem/health | ๐Ÿ’š | + + +*** + +## ๐Ÿ” Autentikasi + +### Login \& Mendapatkan Token + +```bash +curl -X POST http://localhost:8080/api/v1/auth/login \ + -H "Content-Type: application/json" \ + -d '{ + "username": "admin", + "password": "password" + }' +``` + +**Response:** + +```json +{ + "access_token": "eyJhbGciOiJIUzI1NiIs...", + "expires_in": 3600, + "user": { + "id": "123", + "username": "admin", + "role": "admin" + } +} +``` + + +### Menggunakan Token + +```bash +curl -X GET http://localhost:8080/api/v1/products \ + -H "Authorization: Bearer " +``` + + +### Demo Accounts + +| Username | Password | Role | Akses | +| :-- | :-- | :-- | :-- | +| `admin` | `password` | Admin | Semua endpoint | +| `user` | `password` | User | Read-only | + + +*** + +## ๐Ÿ“Š API Endpoints + +### ๐ŸŒ Public Endpoints + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `POST` | `/api/v1/auth/login` | Login pengguna | +| `POST` | `/api/v1/auth/register` | Registrasi pengguna baru | +| `GET` | `/api/sistem/health` | Status kesehatan API | +| `GET` | `/api/sistem/info` | Informasi sistem | + +### ๐Ÿ”’ Protected Endpoints + +#### BPJS Integration + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/v1/Peserta/nokartu/:nokartu` | Data peserta berdasarkan nomor kartu | +| `GET` | `/api/v1/Peserta/nik/:nik` | Data peserta berdasarkan NIK | + +#### Retribusi Management + +| Method | Endpoint | Deskripsi | +| :-- | :-- | :-- | +| `GET` | `/api/v1/retribusi` | List semua retribusi | +| `GET` | 
`/api/v1/retribusi/dynamic` | Query dengan filter dinamis | +| `GET` | `/api/v1/retribusi/search` | Pencarian retribusi | +| `GET` | `/api/v1/retribusi/id/:id` | Detail retribusi by ID | +| `POST` | `/api/v1/retribusi` | Buat retribusi baru | +| `PUT` | `/api/v1/retribusi/id/:id` | Update retribusi | +| `DELETE` | `/api/v1/retribusi/id/:id` | Hapus retribusi | + +#### Dynamic Query Examples + +**Filter berdasarkan jenis:** + +```bash +GET /api/v1/retribusi/dynamic?filter[Jenis][_eq]=RETRIBUSI PELAYANAN KESEHATAN +``` + +**Kombinasi filter:** + +```bash +GET /api/v1/retribusi/dynamic?filter[status][_eq]=active&filter[Tarif][_gt]=100000 +``` + +**Pagination dan sorting:** + +```bash +GET /api/v1/retribusi/dynamic?sort=-date_created&limit=10&offset=20 +``` + +**Advanced search:** + +```bash +GET /api/v1/retribusi/search?q=keyword&limit=20&offset=0 +``` + + +*** + +## ๐Ÿ› ๏ธ Development + +### Code Generation (30 detik) + +**๐ŸŽฏ Generate CRUD Lengkap** + +```bash +# Generate handler untuk entity baru +go run tools/general/generate-handler.go retribusi get post put delete + +# Generate dengan fitur advanced +go run tools/general/generate-handler.go peserta get post put delete dynamic search stats +``` + +**๐Ÿฅ Generate BPJS Handler** + +```bash +# Single service +go run tools/bpjs/generate-bpjs-handler.go tools/bpjs/reference/peserta get + +# Semua service dari config +go run tools/bpjs/generate-handler.go tools/bpjs/services-config-bpjs.yaml +``` + + +### Development Commands + +```bash +# ๐Ÿ”ฅ Development dengan hot reload +make watch + +# ๐Ÿงช Testing +make test # Unit tests +make itest # Integration tests +make test-all # Semua tests + +# ๐Ÿ“– Update dokumentasi +make docs # Generate Swagger docs + +# ๐Ÿ” Code quality +make lint # Linting +make format # Format code +``` + + +### Environment Configuration + +**๐Ÿ“ .env File:** + +```bash +# Database +BLUEPRINT_DB_HOST=localhost +BLUEPRINT_DB_PORT=5432 +BLUEPRINT_DB_USERNAME=postgres 
+BLUEPRINT_DB_PASSWORD=postgres +BLUEPRINT_DB_DATABASE=api_service + +# JWT +JWT_SECRET=your-super-secret-key-change-in-production + +# External APIs +BPJS_BASE_URL=https://api.bpjs-kesehatan.go.id +SATUSEHAT_BASE_URL=https://api.satusehat.kemkes.go.id + +# Application +APP_ENV=development +APP_PORT=8080 +LOG_LEVEL=debug +``` + + +*** + +## ๐Ÿš€ Deployment + +### ๐Ÿณ Docker Deployment + +**Development:** + +```bash +# Start semua services +make docker-run + +# Stop services +make docker-down + +# Rebuild dan restart +make docker-rebuild +``` + +**Production:** + +```bash +# Build production image +docker build -t api-service:prod . + +# Run production container +docker run -d \ + --name api-service \ + -p 8080:8080 \ + --env-file .env.prod \ + api-service:prod +``` + + +### ๐Ÿ”ง Manual Deployment + +```bash +# Build aplikasi +make build + +# Run migrations +./scripts/migrate.sh up + +# Start server +./bin/api-service +``` + + +*** + +## ๐Ÿ“š Dokumentasi + +### ๐Ÿ“– Interactive API Documentation + +Kunjungi **Swagger UI** di: http://localhost:8080/swagger/index.html + +**Cara menggunakan:** + +1. ๐Ÿ”‘ Login melalui `/auth/login` endpoint +2. ๐Ÿ“‹ Copy token dari response +3. ๐Ÿ”“ Klik tombol "Authorize" di Swagger +4. ๐Ÿ“ Masukkan: `Bearer ` +5. 
โœ… Test semua endpoint yang tersedia + +### ๐Ÿงช Testing Examples + +**JavaScript/Axios:** + +```javascript +// Login dan set token +const auth = await axios.post('/api/v1/auth/login', { + username: 'admin', + password: 'password' +}); + +axios.defaults.headers.common['Authorization'] = + `Bearer ${auth.data.access_token}`; + +// Fetch BPJS participant data +const peserta = await axios.get('/api/v1/Peserta/nokartu/1234567890123456'); +console.log(peserta.data); + +// Fetch retribusi data +const retribusi = await axios.get('/api/v1/retribusi'); +console.log(retribusi.data); +``` + +**cURL Examples:** + +```bash +# Login +TOKEN=$(curl -s -X POST http://localhost:8080/api/v1/auth/login \ + -H "Content-Type: application/json" \ + -d '{"username":"admin","password":"password"}' | jq -r '.access_token') + +# Get BPJS participant data +curl -H "Authorization: Bearer $TOKEN" \ + http://localhost:8080/api/v1/Peserta/nokartu/1234567890123456 + +# Get retribusi data +curl -H "Authorization: Bearer $TOKEN" \ + http://localhost:8080/api/v1/retribusi +``` + + +### ๐Ÿ” Health Monitoring + +```bash +# Basic health check +curl http://localhost:8080/api/v1/health + +# Detailed system info +curl http://localhost:8080/api/v1/health/detailed +``` + +**Response:** + +```json +{ + "status": "healthy", + "timestamp": "2025-09-10T05:39:00Z", + "services": { + "database": "connected", + "bpjs_api": "accessible", + "satusehat_api": "accessible" + }, + "version": "1.0.0" +} +``` + + +*** + +## ๐Ÿšจ Troubleshooting + +### Masalah Umum + +**โŒ Database Connection Error** + +```bash +# Cek status PostgreSQL +make db-status + +# Reset database +make db-reset + +# Check logs +make logs-db +``` + +**โŒ Generate Handler Gagal** + +- โœ… Pastikan berada di root project +- โœ… Cek permission write di folder `internal/` +- โœ… Verifikasi file `internal/routes/v1/routes.go` exists + +**โŒ Token Invalid/Expired** + +- ๐Ÿ”„ Login ulang untuk mendapatkan token baru +- โฐ Token expire dalam 1 jam 
(configurable) +- ๐Ÿ“ Format harus: `Bearer ` + +**โŒ Import Error saat Generate** + +- ๐Ÿงน Jalankan: `go mod tidy` +- ๐Ÿ”„ Restart development server +- ๐Ÿ“ Cek format import di generated files + + +### Debug Mode + +```bash +# Enable debug logging +export LOG_LEVEL=debug + +# Run dengan verbose output +make run-debug + +# Monitor performance +make monitor +``` + + +*** + +## ๐ŸŽฏ Next Steps + +### ๐Ÿ“‹ Development Roadmap + +- [ ] โœ… **Setup environment selesai** +- [ ] โœ… **Generate handler pertama** +- [ ] โœ… **Test dengan Swagger** +- [ ] ๐Ÿ”„ **Implementasi business logic** +- [ ] ๐Ÿ”„ **Tambahkan unit tests** +- [ ] ๐Ÿ”„ **Setup CI/CD pipeline** +- [ ] ๐Ÿ”„ **Deploy ke production** + + +### ๐Ÿš€ Advanced Features + +- **๐Ÿ“Š Monitoring \& Observability** +- **๐Ÿ”’ Enhanced Security (Rate limiting, CORS)** +- **๐Ÿ“ˆ Performance Optimization** +- **๐ŸŒ Multi-language Support** +- **๐Ÿ“ฑ Mobile SDK Integration** + +*** + +**โšก Total setup time: 5 menit | ๐Ÿ”ง Generate CRUD: 30 detik | ๐Ÿงช Testing: Langsung via Swagger** + +> **๐Ÿ’ก Pro Tip:** Gunakan `make help` untuk melihat semua command yang tersedia + +*** + diff --git a/cmd/api/main.go b/cmd/api/main.go new file mode 100644 index 0000000..8f4e5c3 --- /dev/null +++ b/cmd/api/main.go @@ -0,0 +1,86 @@ +package main + +import ( + "context" + "fmt" + "log" + "net/http" + "os/signal" + "syscall" + "time" + + "api-service/internal/server" + + "github.com/joho/godotenv" // Import the godotenv package + + _ "api-service/docs" +) + +// @title API Service +// @version 1.0.0 +// @description A comprehensive Go API service with Swagger documentation +// @termsOfService http://swagger.io/terms/ + +// @contact.name API Support +// @contact.url http://www.swagger.io/support +// @contact.email support@swagger.io + +// @license.name Apache 2.0 +// @license.url http://www.apache.org/licenses/LICENSE-2.0.html + +// @host localhost:8080 +// @BasePath /api/v1 +// @schemes http https + +func gracefulShutdown(apiServer 
*http.Server, done chan bool) { + // Create context that listens for the interrupt signal from the OS. + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + // Listen for the interrupt signal. + <-ctx.Done() + + log.Println("shutting down gracefully, press Ctrl+C again to force") + stop() // Allow Ctrl+C to force shutdown + + // The context is used to inform the server it has 5 seconds to finish + // the request it is currently handling + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if err := apiServer.Shutdown(ctx); err != nil { + log.Printf("Server forced to shutdown with error: %v", err) + } + + log.Println("Server exiting") + + // Notify the main goroutine that the shutdown is complete + done <- true +} + +func main() { + log.Println("Starting API Service...") + + // Load environment variables from .env file + if err := godotenv.Load(); err != nil { + log.Printf("Warning: .env file not found or could not be loaded: %v", err) + log.Println("Continuing with system environment variables...") + } + + server := server.NewServer() + + // Create a done channel to signal when the shutdown is complete + done := make(chan bool, 1) + + // Run graceful shutdown in a separate goroutine + go gracefulShutdown(server, done) + + log.Printf("Server starting on port %s", server.Addr) + err := server.ListenAndServe() + if err != nil && err != http.ErrServerClosed { + panic(fmt.Sprintf("http server error: %s", err)) + } + + // Wait for the graceful shutdown to complete + <-done + log.Println("Graceful shutdown complete.") +} diff --git a/cmd/logging/main.go b/cmd/logging/main.go new file mode 100644 index 0000000..97774de --- /dev/null +++ b/cmd/logging/main.go @@ -0,0 +1,109 @@ +package main + +import ( + "fmt" + "log" + "time" + + "api-service/pkg/logger" +) + +func main() { + fmt.Println("Testing Dynamic Logging Functions...") + 
fmt.Println("====================================") + + // Test fungsi penyimpanan log dinamis + testDynamicLogging() + + // Tunggu sebentar untuk memastikan goroutine selesai + time.Sleep(500 * time.Millisecond) + + fmt.Println("\n====================================") + fmt.Println("Dynamic logging test completed!") + fmt.Println("Check the log files in pkg/logger/data/ directory") +} + +func testDynamicLogging() { + // Buat logger instance + loggerInstance := logger.New("test-app", logger.DEBUG, false) + + // Test 1: Log dengan penyimpanan otomatis + fmt.Println("\n1. Testing automatic log saving...") + loggerInstance.LogAndSave(logger.INFO, "Application started successfully", map[string]interface{}{ + "version": "1.0.0", + "build_date": time.Now().Format("2006-01-02"), + "environment": "development", + }) + + // Test 2: Log dengan request context + fmt.Println("\n2. Testing log with request context...") + requestLogger := loggerInstance.WithRequestID("req-001").WithCorrelationID("corr-001") + requestLogger.LogAndSave(logger.INFO, "User login attempt", map[string]interface{}{ + "username": "john_doe", + "ip": "192.168.1.100", + "success": true, + }) + + // Test 3: Error logging + fmt.Println("\n3. Testing error logging...") + loggerInstance.LogAndSave(logger.ERROR, "Database connection failed", map[string]interface{}{ + "error": "connection timeout", + "retry_count": 3, + "host": "db.example.com:5432", + }) + + // Test 4: Manual log entry saving + fmt.Println("\n4. 
Testing manual log entry saving...") + manualEntry := logger.LogEntry{ + Timestamp: time.Now().Format(time.RFC3339), + Level: "DEBUG", + Service: "manual-test", + Message: "Manual log entry created", + RequestID: "manual-req-001", + CorrelationID: "manual-corr-001", + File: "main.go", + Line: 42, + Fields: map[string]interface{}{ + "custom_field": "test_value", + "number": 123, + "active": true, + }, + } + + // Simpan manual ke berbagai format + if err := logger.SaveLogText(manualEntry); err != nil { + log.Printf("Error saving text log: %v", err) + } else { + fmt.Println("โœ“ Text log saved successfully") + } + + if err := logger.SaveLogJSON(manualEntry); err != nil { + log.Printf("Error saving JSON log: %v", err) + } else { + fmt.Println("โœ“ JSON log saved successfully") + } + + if err := logger.SaveLogToDatabase(manualEntry); err != nil { + log.Printf("Error saving database log: %v", err) + } else { + fmt.Println("โœ“ Database log saved successfully") + } + + // Test 5: Performance logging dengan durasi + fmt.Println("\n5. 
Testing performance logging...") + start := time.Now() + + // Simulasi proses yang memakan waktu + time.Sleep(200 * time.Millisecond) + + duration := time.Since(start) + loggerInstance.LogAndSave(logger.INFO, "Data processing completed", map[string]interface{}{ + "operation": "data_import", + "duration": duration.String(), + "duration_ms": duration.Milliseconds(), + "records": 1000, + "throughput": fmt.Sprintf("%.2f records/ms", 1000/float64(duration.Milliseconds())), + }) + + fmt.Println("\nโœ“ All logging tests completed successfully!") +} diff --git a/diagnostic/main.go b/diagnostic/main.go new file mode 100644 index 0000000..e1c5a2c --- /dev/null +++ b/diagnostic/main.go @@ -0,0 +1,130 @@ +package main + +import ( + "database/sql" + "fmt" + "log" + "os" + + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/joho/godotenv" +) + +func main() { + fmt.Println("=== Database Connection Diagnostic Tool ===") + + // Load environment variables from .env file + if err := godotenv.Load(); err != nil { + log.Printf("Warning: Error loading .env file: %v", err) + } + + // Get configuration from environment + host := os.Getenv("DB_HOST") + port := os.Getenv("DB_PORT") + username := os.Getenv("DB_USERNAME") + password := os.Getenv("DB_PASSWORD") + database := os.Getenv("DB_DATABASE") + sslmode := os.Getenv("DB_SSLMODE") + + if sslmode == "" { + sslmode = "disable" + } + + fmt.Printf("Host: %s\n", host) + fmt.Printf("Port: %s\n", port) + fmt.Printf("Username: %s\n", username) + fmt.Printf("Database: %s\n", database) + fmt.Printf("SSL Mode: %s\n", sslmode) + + if host == "" || username == "" || password == "" { + fmt.Println("โŒ Missing required environment variables") + return + } + + // Test connection to PostgreSQL server + fmt.Println("\n--- Testing PostgreSQL Server Connection ---") + serverConnStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=postgres sslmode=%s", + host, port, username, password, sslmode) + + db, err := sql.Open("pgx", serverConnStr) + if err 
!= nil { + fmt.Printf("โŒ Failed to connect to PostgreSQL server: %v\n", err) + return + } + defer db.Close() + + err = db.Ping() + if err != nil { + fmt.Printf("โŒ Failed to ping PostgreSQL server: %v\n", err) + return + } + + fmt.Println("โœ… Successfully connected to PostgreSQL server") + + // Check if database exists + fmt.Println("\n--- Checking Database Existence ---") + var exists bool + err = db.QueryRow("SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = $1)", database).Scan(&exists) + if err != nil { + fmt.Printf("โŒ Failed to check database existence: %v\n", err) + return + } + + if !exists { + fmt.Printf("โŒ Database '%s' does not exist\n", database) + + // List available databases + fmt.Println("\n--- Available Databases ---") + rows, err := db.Query("SELECT datname FROM pg_database WHERE datistemplate = false ORDER BY datname") + if err != nil { + fmt.Printf("โŒ Failed to list databases: %v\n", err) + return + } + defer rows.Close() + + fmt.Println("Available databases:") + for rows.Next() { + var dbName string + if err := rows.Scan(&dbName); err != nil { + continue + } + fmt.Printf(" - %s\n", dbName) + } + return + } + + fmt.Printf("โœ… Database '%s' exists\n", database) + + // Test direct connection to the database + fmt.Println("\n--- Testing Direct Database Connection ---") + directConnStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s", + host, port, username, password, database, sslmode) + + targetDB, err := sql.Open("pgx", directConnStr) + if err != nil { + fmt.Printf("โŒ Failed to connect to database '%s': %v\n", database, err) + return + } + defer targetDB.Close() + + err = targetDB.Ping() + if err != nil { + fmt.Printf("โŒ Failed to ping database '%s': %v\n", database, err) + return + } + + fmt.Printf("โœ… Successfully connected to database '%s'\n", database) + + // Test basic query + fmt.Println("\n--- Testing Basic Query ---") + var version string + err = targetDB.QueryRow("SELECT 
version()").Scan(&version) + if err != nil { + fmt.Printf("โŒ Failed to execute query: %v\n", err) + return + } + + fmt.Printf("โœ… PostgreSQL Version: %s\n", version) + + fmt.Println("\n๐ŸŽ‰ All tests passed! Database connection is working correctly.") +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..17b58e9 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,194 @@ +services: + # # PostgreSQL Database + # psql_bp: + # image: postgres:15-alpine + # restart: unless-stopped + # environment: + # POSTGRES_USER: stim + # POSTGRES_PASSWORD: stim*RS54 + # POSTGRES_DB: satu_db + # ports: + # - "5432:5432" + # volumes: + # - postgres_data:/var/lib/postgresql/data + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -U stim -d satu_db"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # # MongoDB Database + # mongodb: + # image: mongo:7-jammy + # restart: unless-stopped + # environment: + # MONGO_INITDB_ROOT_USERNAME: admin + # MONGO_INITDB_ROOT_PASSWORD: stim*rs54 + # ports: + # - "27017:27017" + # volumes: + # - mongodb_data:/data/db + # networks: + # - blueprint + + # # MySQL Antrian Database + # mysql_antrian: + # image: mysql:8.0 + # restart: unless-stopped + # environment: + # MYSQL_ROOT_PASSWORD: www-data + # MYSQL_USER: www-data + # MYSQL_PASSWORD: www-data + # MYSQL_DATABASE: antrian_rssa + # ports: + # - "3306:3306" + # volumes: + # - mysql_antrian_data:/var/lib/mysql + # healthcheck: + # test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # # MySQL Medical Database + # mysql_medical: + # image: mysql:8.0 + # restart: unless-stopped + # environment: + # MYSQL_ROOT_PASSWORD: meninjar*RS54 + # MYSQL_USER: meninjardev + # MYSQL_PASSWORD: meninjar*RS54 + # MYSQL_DATABASE: healtcare_database + # ports: + # - "3307:3306" + # volumes: + # - mysql_medical_data:/var/lib/mysql + # healthcheck: + # test: ["CMD", 
"mysqladmin", "ping", "-h", "localhost"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - blueprint + + # Main Application + app: + build: + context: . + dockerfile: Dockerfile + target: prod + restart: unless-stopped + ports: + - "8080:8080" + environment: + # Server Configuration + APP_ENV: production + PORT: 8080 + GIN_MODE: release + + # Default Database Configuration (PostgreSQL) + DB_CONNECTION: postgres + DB_USERNAME: stim + DB_PASSWORD: stim*RS54 + DB_HOST: 10.10.123.165 + DB_DATABASE: satu_db + DB_PORT: 5432 + DB_SSLMODE: disable + + # satudata Database Configuration (PostgreSQL) + POSTGRES_SATUDATA_CONNECTION: postgres + POSTGRES_SATUDATA_USERNAME: stim + POSTGRES_SATUDATA_PASSWORD: stim*RS54 + POSTGRES_SATUDATA_HOST: 10.10.123.165 + POSTGRES_SATUDATA_DATABASE: satu_db + POSTGRES_SATUDATA_PORT: 5432 + POSTGRES_SATUDATA_SSLMODE: disable + + # Mongo Database + MONGODB_MONGOHL7_CONNECTION: mongodb + MONGODB_MONGOHL7_HOST: 10.10.123.206 + MONGODB_MONGOHL7_PORT: 27017 + MONGODB_MONGOHL7_USER: admin + MONGODB_MONGOHL7_PASS: stim*rs54 + MONGODB_MONGOHL7_MASTER: master + MONGODB_MONGOHL7_LOCAL: local + MONGODB_MONGOHL7_SSLMODE: disable + + # MYSQL Antrian Database + # MYSQL_ANTRIAN_CONNECTION: mysql + # MYSQL_ANTRIAN_HOST: mysql_antrian + # MYSQL_ANTRIAN_USERNAME: www-data + # MYSQL_ANTRIAN_PASSWORD: www-data + # MYSQL_ANTRIAN_DATABASE: antrian_rssa + # MYSQL_ANTRIAN_PORT: 3306 + # MYSQL_ANTRIAN_SSLMODE: disable + + # MYSQL Medical Database + MYSQL_MEDICAL_CONNECTION: mysql + MYSQL_MEDICAL_HOST: 10.10.123.163 + MYSQL_MEDICAL_USERNAME: meninjardev + MYSQL_MEDICAL_PASSWORD: meninjar*RS54 + MYSQL_MEDICAL_DATABASE: healtcare_database + MYSQL_MEDICAL_PORT: 3306 + MYSQL_MEDICAL_SSLMODE: disable + + # Keycloak Configuration + KEYCLOAK_ISSUER: https://auth.rssa.top/realms/sandbox + KEYCLOAK_AUDIENCE: nuxtsim-pendaftaran + KEYCLOAK_JWKS_URL: https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs + KEYCLOAK_ENABLED: true + + # BPJS 
Configuration + BPJS_BASEURL: https://apijkn.bpjs-kesehatan.go.id/vclaim-rest + BPJS_CONSID: 5257 + BPJS_USERKEY: 4cf1cbef8c008440bbe9ef9ba789e482 + BPJS_SECRETKEY: 1bV363512D + + # SatuSehat Configuration + BRIDGING_SATUSEHAT_ORG_ID: 100026555 + BRIDGING_SATUSEHAT_FASYAKES_ID: 3573011 + BRIDGING_SATUSEHAT_CLIENT_ID: l1ZgJGW6K5pnrqGUikWM7fgIoquA2AQ5UUG0U8WqHaq2VEyZ + BRIDGING_SATUSEHAT_CLIENT_SECRET: Al3PTYAW6axPiAFwaFlpn8qShLFW5YGMgG8w1qhexgCc7lGTEjjcR6zxa06ThPDy + BRIDGING_SATUSEHAT_AUTH_URL: https://api-satusehat.kemkes.go.id/oauth2/v1 + BRIDGING_SATUSEHAT_BASE_URL: https://api-satusehat.kemkes.go.id/fhir-r4/v1 + BRIDGING_SATUSEHAT_CONSENT_URL: https://api-satusehat.dto.kemkes.go.id/consent/v1 + BRIDGING_SATUSEHAT_KFA_URL: https://api-satusehat.kemkes.go.id/kfa-v2 + + # Swagger Configuration + SWAGGER_TITLE: My Custom API Service + SWAGGER_DESCRIPTION: This is a custom API service for managing various resources + SWAGGER_VERSION: 2.0.0 + SWAGGER_CONTACT_NAME: Support Team + SWAGGER_HOST: api.mycompany.com:8080 + SWAGGER_BASE_PATH: /api/v2 + SWAGGER_SCHEMES: https + + # API Configuration + API_TITLE: API Service UJICOBA + API_DESCRIPTION: Dokumentation SWAGGER + API_VERSION: 3.0.0 + + # depends_on: + # psql_bp: + # condition: service_healthy + # mongodb: + # condition: service_started + # mysql_antrian: + # condition: service_healthy + # mysql_medical: + # condition: service_healthy + networks: + - goservice + +# volumes: +# postgres_data: +# mongodb_data: +# mysql_antrian_data: +# mysql_medical_data: + +networks: + goservice: diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000..80e4eb1 --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,1468 @@ +// Code generated by swaggo/swag. DO NOT EDIT. 
+ +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/Peserta/nik/:nik": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Get participant eligibility information by NIK", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Peserta" + ], + "summary": "Get Bynik data", + "parameters": [ + { + "type": "string", + "description": "Request ID for tracking", + "name": "X-Request-ID", + "in": "header" + }, + { + "type": "string", + "example": "\"example_value\"", + "description": "nik", + "name": "nik", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully retrieved Bynik data", + "schema": { + "$ref": "#/definitions/peserta.PesertaResponse" + } + }, + "400": { + "description": "Bad request - invalid parameters", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "401": { + "description": "Unauthorized - invalid API credentials", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "404": { + "description": "Not found - Bynik not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + } + } + } + }, + "/Peserta/nokartu/:nokartu": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Get participant 
eligibility information by card number", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Peserta" + ], + "summary": "Get Bynokartu data", + "parameters": [ + { + "type": "string", + "description": "Request ID for tracking", + "name": "X-Request-ID", + "in": "header" + }, + { + "type": "string", + "example": "\"example_value\"", + "description": "nokartu", + "name": "nokartu", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully retrieved Bynokartu data", + "schema": { + "$ref": "#/definitions/peserta.PesertaResponse" + } + }, + "400": { + "description": "Bad request - invalid parameters", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "401": { + "description": "Unauthorized - invalid API credentials", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "404": { + "description": "Not found - Bynokartu not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "description": "Authenticate user with username and password to receive JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Login user and get JWT token", + "parameters": [ + { + "description": "Login credentials", + "name": "login", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": 
"object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/refresh": { + "post": { + "description": "Refresh the JWT token using a valid refresh token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh JWT token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Register a new user account", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Register new user", + "parameters": [ + { + "description": "Registration data", + "name": "register", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "201": { + "description": 
"Created", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/retribusi/{id}": { + "get": { + "description": "Returns a single retribusi by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get Retribusi by ID", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "put": { + "description": "Updates an existing retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Update retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Retribusi update request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "Retribusi updated successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + 
}, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "delete": { + "description": "Soft deletes a retribusi by setting status to 'deleted'", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Delete retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Retribusi deleted successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiDeleteResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis": { + "get": { + "description": "Returns a paginated list of retribusis with optional summary statistics", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with pagination and optional aggregation", + "parameters": [ + { + "type": "integer", + "default": 10, + "description": "Limit (max 100)", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + }, + { + "type": "boolean", + "default": false, + "description": "Include aggregation summary", + "name": "include_summary", + "in": "query" + }, + { + "type": "string", + "description": "Filter by status", + "name": "status", + "in": "query" + }, + { + "type": 
"string", + "description": "Filter by jenis", + "name": "jenis", + "in": "query" + }, + { + "type": "string", + "description": "Filter by dinas", + "name": "dinas", + "in": "query" + }, + { + "type": "string", + "description": "Search in multiple fields", + "name": "search", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "post": { + "description": "Creates a new retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Create retribusi", + "parameters": [ + { + "description": "Retribusi creation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateRequest" + } + } + ], + "responses": { + "201": { + "description": "Retribusi created successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/dynamic": { + "get": { + "description": "Returns retribusis with advanced dynamic filtering like Directus", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with dynamic filtering", + "parameters": [ + { + "type": "string", + "description": "Fields to select (e.g., fields=*.*)", + "name": "fields", + "in": "query" + }, + { + "type": 
"string", + "description": "Dynamic filters (e.g., filter[Jenis][_eq]=value)", + "name": "filter[column][operator]", + "in": "query" + }, + { + "type": "string", + "description": "Sort fields (e.g., sort=date_created,-Jenis)", + "name": "sort", + "in": "query" + }, + { + "type": "integer", + "default": 10, + "description": "Limit", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/stats": { + "get": { + "description": "Returns comprehensive statistics about retribusi data", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi statistics", + "parameters": [ + { + "type": "string", + "description": "Filter statistics by status", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Statistics data", + "schema": { + "$ref": "#/definitions/models.AggregateData" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/token/generate": { + "post": { + "description": "Generate a JWT token for a user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate JWT token", + "parameters": [ + { + "description": "User credentials", + "name": "token", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": 
{ + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/token/generate-direct": { + "post": { + "description": "Generate a JWT token directly without password verification (for testing)", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate token directly", + "parameters": [ + { + "description": "User info", + "name": "user", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "models.AggregateData": { + "type": "object", + "properties": { + "by_dinas": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_jenis": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_status": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "created_today": { + "type": "integer" + }, + "last_updated": { + "type": "string" + }, + "total_active": { + "type": "integer" + }, + "total_draft": { + "type": "integer" + }, + "total_inactive": { + "type": "integer" + }, + "updated_today": { + "type": "integer" + } + } + }, + "models.ErrorResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer" + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + }, + 
"timestamp": { + "type": "string" + } + } + }, + "models.ErrorResponseBpjs": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "errors": { + "type": "object", + "additionalProperties": true + }, + "message": { + "type": "string" + }, + "request_id": { + "type": "string" + }, + "status": { + "type": "string" + } + } + }, + "models.LoginRequest": { + "type": "object", + "required": [ + "password", + "username" + ], + "properties": { + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "models.MetaResponse": { + "type": "object", + "properties": { + "current_page": { + "type": "integer" + }, + "has_next": { + "type": "boolean" + }, + "has_prev": { + "type": "boolean" + }, + "limit": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "total": { + "type": "integer" + }, + "total_pages": { + "type": "integer" + } + } + }, + "models.NullableInt32": { + "type": "object", + "properties": { + "int32": { + "type": "integer" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableTime": { + "type": "object", + "properties": { + "time": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.TokenResponse": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "expires_in": { + "type": "integer" + }, + "token_type": { + "type": "string" + } + } + }, + "models.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "peserta.PesertaData": { + "type": "object", + "properties": { + "cob": { + "type": "object", + "properties": { + "nmAsuransi": {}, + "noAsuransi": {}, + "tglTAT": {}, + "tglTMT": {} + } + }, + "hakKelas": { + "type": 
"object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "informasi": { + "type": "object", + "properties": { + "dinsos": {}, + "eSEP": {}, + "noSKTM": {}, + "prolanisPRB": { + "type": "string" + } + } + }, + "jenisPeserta": { + "type": "object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "mr": { + "type": "object", + "properties": { + "noMR": { + "type": "string" + }, + "noTelepon": { + "type": "string" + } + } + }, + "nama": { + "type": "string" + }, + "nik": { + "type": "string" + }, + "noKartu": { + "type": "string" + }, + "pisa": { + "type": "string" + }, + "provUmum": { + "type": "object", + "properties": { + "kdProvider": { + "type": "string" + }, + "nmProvider": { + "type": "string" + } + } + }, + "raw_response": { + "type": "string" + }, + "sex": { + "type": "string" + }, + "statusPeserta": { + "type": "object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "tglCetakKartu": { + "type": "string" + }, + "tglLahir": { + "type": "string" + }, + "tglTAT": { + "type": "string" + }, + "tglTMT": { + "type": "string" + }, + "umur": { + "type": "object", + "properties": { + "umurSaatPelayanan": { + "type": "string" + }, + "umurSekarang": { + "type": "string" + } + } + } + } + }, + "peserta.PesertaResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/peserta.PesertaData" + }, + "message": { + "type": "string" + }, + "metaData": {}, + "request_id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + } + }, + "retribusi.Retribusi": { + "type": "object", + "properties": { + "date_created": { + "$ref": "#/definitions/models.NullableTime" + }, + "date_updated": { + "$ref": "#/definitions/models.NullableTime" + }, + "dinas": { + "$ref": "#/definitions/models.NullableString" + }, + "id": { + "type": "string" 
+ }, + "jenis": { + "$ref": "#/definitions/models.NullableString" + }, + "kelompok_obyek": { + "$ref": "#/definitions/models.NullableString" + }, + "kode_tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "pelayanan": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_denda": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_pokok": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "sort": { + "$ref": "#/definitions/models.NullableInt32" + }, + "status": { + "type": "string" + }, + "tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "tarif_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_1": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_2": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_3": { + "$ref": "#/definitions/models.NullableString" + }, + "user_created": { + "$ref": "#/definitions/models.NullableString" + }, + "user_updated": { + "$ref": "#/definitions/models.NullableString" + } + } + }, + "retribusi.RetribusiCreateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + 
"minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiCreateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetByIDResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/definitions/retribusi.Retribusi" + } + }, + "message": { + "type": "string" + }, + "meta": { + "$ref": "#/definitions/models.MetaResponse" + }, + "summary": { + "$ref": "#/definitions/models.AggregateData" + } + } + }, + "retribusi.RetribusiUpdateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": 
"string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiUpdateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0.0", + Host: "localhost:8080", + BasePath: "/api/v1", + Schemes: []string{"http", "https"}, + Title: "API Service", + Description: "A comprehensive Go API service with Swagger documentation", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/docs/swagger.json b/docs/swagger.json new file mode 100644 index 0000000..922c9f6 --- /dev/null +++ b/docs/swagger.json @@ -0,0 +1,1449 @@ +{ + "schemes": [ + "http", + "https" + ], + "swagger": "2.0", + "info": { + "description": "A comprehensive Go API service with Swagger documentation", + "title": "API Service", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "1.0.0" + }, + "host": "localhost:8080", + "basePath": "/api/v1", + "paths": { + "/Peserta/nik/:nik": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Get participant eligibility information by NIK", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Peserta" + ], + "summary": "Get Bynik data", + 
"parameters": [ + { + "type": "string", + "description": "Request ID for tracking", + "name": "X-Request-ID", + "in": "header" + }, + { + "type": "string", + "example": "\"example_value\"", + "description": "nik", + "name": "nik", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully retrieved Bynik data", + "schema": { + "$ref": "#/definitions/peserta.PesertaResponse" + } + }, + "400": { + "description": "Bad request - invalid parameters", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "401": { + "description": "Unauthorized - invalid API credentials", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "404": { + "description": "Not found - Bynik not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + } + } + } + }, + "/Peserta/nokartu/:nokartu": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Get participant eligibility information by card number", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Peserta" + ], + "summary": "Get Bynokartu data", + "parameters": [ + { + "type": "string", + "description": "Request ID for tracking", + "name": "X-Request-ID", + "in": "header" + }, + { + "type": "string", + "example": "\"example_value\"", + "description": "nokartu", + "name": "nokartu", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully retrieved Bynokartu data", + "schema": { + "$ref": "#/definitions/peserta.PesertaResponse" + } + }, + "400": { + "description": "Bad request - invalid parameters", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "401": { + "description": "Unauthorized - invalid API credentials", + "schema": { + "$ref": 
"#/definitions/models.ErrorResponseBpjs" + } + }, + "404": { + "description": "Not found - Bynokartu not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponseBpjs" + } + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "description": "Authenticate user with username and password to receive JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Login user and get JWT token", + "parameters": [ + { + "description": "Login credentials", + "name": "login", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/refresh": { + "post": { + "description": "Refresh the JWT token using a valid refresh token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh JWT 
token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Register a new user account", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Register new user", + "parameters": [ + { + "description": "Registration data", + "name": "register", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/retribusi/{id}": { + "get": { + "description": "Returns a single retribusi by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get Retribusi by ID", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + "$ref": 
"#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "put": { + "description": "Updates an existing retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Update retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Retribusi update request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "Retribusi updated successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiUpdateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "delete": { + "description": "Soft deletes a retribusi by setting status to 'deleted'", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Delete retribusi", + "parameters": [ + { + "type": "string", + "description": "Retribusi ID (UUID)", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Retribusi deleted successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiDeleteResponse" + } + }, + "400": { + "description": "Invalid ID format", + "schema": { + 
"$ref": "#/definitions/models.ErrorResponse" + } + }, + "404": { + "description": "Retribusi not found", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis": { + "get": { + "description": "Returns a paginated list of retribusis with optional summary statistics", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with pagination and optional aggregation", + "parameters": [ + { + "type": "integer", + "default": 10, + "description": "Limit (max 100)", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + }, + { + "type": "boolean", + "default": false, + "description": "Include aggregation summary", + "name": "include_summary", + "in": "query" + }, + { + "type": "string", + "description": "Filter by status", + "name": "status", + "in": "query" + }, + { + "type": "string", + "description": "Filter by jenis", + "name": "jenis", + "in": "query" + }, + { + "type": "string", + "description": "Filter by dinas", + "name": "dinas", + "in": "query" + }, + { + "type": "string", + "description": "Search in multiple fields", + "name": "search", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + }, + "post": { + "description": "Creates a new retribusi record", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + 
"summary": "Create retribusi", + "parameters": [ + { + "description": "Retribusi creation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateRequest" + } + } + ], + "responses": { + "201": { + "description": "Retribusi created successfully", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiCreateResponse" + } + }, + "400": { + "description": "Bad request or validation error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/retribusis/dynamic": { + "get": { + "description": "Returns retribusis with advanced dynamic filtering like Directus", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi with dynamic filtering", + "parameters": [ + { + "type": "string", + "description": "Fields to select (e.g., fields=*.*)", + "name": "fields", + "in": "query" + }, + { + "type": "string", + "description": "Dynamic filters (e.g., filter[Jenis][_eq]=value)", + "name": "filter[column][operator]", + "in": "query" + }, + { + "type": "string", + "description": "Sort fields (e.g., sort=date_created,-Jenis)", + "name": "sort", + "in": "query" + }, + { + "type": "integer", + "default": 10, + "description": "Limit", + "name": "limit", + "in": "query" + }, + { + "type": "integer", + "default": 0, + "description": "Offset", + "name": "offset", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Success response", + "schema": { + "$ref": "#/definitions/retribusi.RetribusiGetResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } 
+ }, + "/api/v1/retribusis/stats": { + "get": { + "description": "Returns comprehensive statistics about retribusi data", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Retribusi" + ], + "summary": "Get retribusi statistics", + "parameters": [ + { + "type": "string", + "description": "Filter statistics by status", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Statistics data", + "schema": { + "$ref": "#/definitions/models.AggregateData" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/models.ErrorResponse" + } + } + } + } + }, + "/api/v1/token/generate": { + "post": { + "description": "Generate a JWT token for a user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate JWT token", + "parameters": [ + { + "description": "User credentials", + "name": "token", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/models.LoginRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/v1/token/generate-direct": { + "post": { + "description": "Generate a JWT token directly without password verification (for testing)", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Token" + ], + "summary": "Generate token directly", + "parameters": [ + { + "description": "User info", + "name": "user", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + 
} + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/models.TokenResponse" + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "models.AggregateData": { + "type": "object", + "properties": { + "by_dinas": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_jenis": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "by_status": { + "type": "object", + "additionalProperties": { + "type": "integer" + } + }, + "created_today": { + "type": "integer" + }, + "last_updated": { + "type": "string" + }, + "total_active": { + "type": "integer" + }, + "total_draft": { + "type": "integer" + }, + "total_inactive": { + "type": "integer" + }, + "updated_today": { + "type": "integer" + } + } + }, + "models.ErrorResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer" + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + } + }, + "models.ErrorResponseBpjs": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "errors": { + "type": "object", + "additionalProperties": true + }, + "message": { + "type": "string" + }, + "request_id": { + "type": "string" + }, + "status": { + "type": "string" + } + } + }, + "models.LoginRequest": { + "type": "object", + "required": [ + "password", + "username" + ], + "properties": { + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "models.MetaResponse": { + "type": "object", + "properties": { + "current_page": { + "type": "integer" + }, + "has_next": { + "type": "boolean" + }, + "has_prev": { + "type": "boolean" + }, + "limit": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "total": { + "type": "integer" + }, + "total_pages": { + 
"type": "integer" + } + } + }, + "models.NullableInt32": { + "type": "object", + "properties": { + "int32": { + "type": "integer" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.NullableTime": { + "type": "object", + "properties": { + "time": { + "type": "string" + }, + "valid": { + "type": "boolean" + } + } + }, + "models.TokenResponse": { + "type": "object", + "properties": { + "access_token": { + "type": "string" + }, + "expires_in": { + "type": "integer" + }, + "token_type": { + "type": "string" + } + } + }, + "models.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "peserta.PesertaData": { + "type": "object", + "properties": { + "cob": { + "type": "object", + "properties": { + "nmAsuransi": {}, + "noAsuransi": {}, + "tglTAT": {}, + "tglTMT": {} + } + }, + "hakKelas": { + "type": "object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "informasi": { + "type": "object", + "properties": { + "dinsos": {}, + "eSEP": {}, + "noSKTM": {}, + "prolanisPRB": { + "type": "string" + } + } + }, + "jenisPeserta": { + "type": "object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "mr": { + "type": "object", + "properties": { + "noMR": { + "type": "string" + }, + "noTelepon": { + "type": "string" + } + } + }, + "nama": { + "type": "string" + }, + "nik": { + "type": "string" + }, + "noKartu": { + "type": "string" + }, + "pisa": { + "type": "string" + }, + "provUmum": { + "type": "object", + "properties": { + "kdProvider": { + "type": "string" + }, + "nmProvider": { + "type": "string" + } + } + }, + "raw_response": { + "type": "string" + }, + 
"sex": { + "type": "string" + }, + "statusPeserta": { + "type": "object", + "properties": { + "keterangan": { + "type": "string" + }, + "kode": { + "type": "string" + } + } + }, + "tglCetakKartu": { + "type": "string" + }, + "tglLahir": { + "type": "string" + }, + "tglTAT": { + "type": "string" + }, + "tglTMT": { + "type": "string" + }, + "umur": { + "type": "object", + "properties": { + "umurSaatPelayanan": { + "type": "string" + }, + "umurSekarang": { + "type": "string" + } + } + } + } + }, + "peserta.PesertaResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/peserta.PesertaData" + }, + "message": { + "type": "string" + }, + "metaData": {}, + "request_id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + } + }, + "retribusi.Retribusi": { + "type": "object", + "properties": { + "date_created": { + "$ref": "#/definitions/models.NullableTime" + }, + "date_updated": { + "$ref": "#/definitions/models.NullableTime" + }, + "dinas": { + "$ref": "#/definitions/models.NullableString" + }, + "id": { + "type": "string" + }, + "jenis": { + "$ref": "#/definitions/models.NullableString" + }, + "kelompok_obyek": { + "$ref": "#/definitions/models.NullableString" + }, + "kode_tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "pelayanan": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_denda": { + "$ref": "#/definitions/models.NullableString" + }, + "rekening_pokok": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan": { + "$ref": "#/definitions/models.NullableString" + }, + "satuan_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "sort": { + "$ref": "#/definitions/models.NullableInt32" + }, + "status": { + "type": "string" + }, + "tarif": { + "$ref": "#/definitions/models.NullableString" + }, + "tarif_overtime": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_1": { + "$ref": 
"#/definitions/models.NullableString" + }, + "uraian_2": { + "$ref": "#/definitions/models.NullableString" + }, + "uraian_3": { + "$ref": "#/definitions/models.NullableString" + }, + "user_created": { + "$ref": "#/definitions/models.NullableString" + }, + "user_updated": { + "$ref": "#/definitions/models.NullableString" + } + } + }, + "retribusi.RetribusiCreateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiCreateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "retribusi.RetribusiGetByIDResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } 
+ } + }, + "retribusi.RetribusiGetResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/definitions/retribusi.Retribusi" + } + }, + "message": { + "type": "string" + }, + "meta": { + "$ref": "#/definitions/models.MetaResponse" + }, + "summary": { + "$ref": "#/definitions/models.AggregateData" + } + } + }, + "retribusi.RetribusiUpdateRequest": { + "type": "object", + "required": [ + "status" + ], + "properties": { + "dinas": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "jenis": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kelompok_obyek": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "kode_tarif": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "pelayanan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_denda": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "rekening_pokok": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "satuan_overtime": { + "type": "string", + "maxLength": 255, + "minLength": 1 + }, + "status": { + "type": "string", + "enum": [ + "draft", + "active", + "inactive" + ] + }, + "tarif": { + "type": "string" + }, + "tarif_overtime": { + "type": "string" + }, + "uraian_1": { + "type": "string" + }, + "uraian_2": { + "type": "string" + }, + "uraian_3": { + "type": "string" + } + } + }, + "retribusi.RetribusiUpdateResponse": { + "type": "object", + "properties": { + "data": { + "$ref": "#/definitions/retribusi.Retribusi" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml new file mode 100644 index 0000000..5429b71 --- /dev/null +++ b/docs/swagger.yaml @@ -0,0 +1,967 @@ +basePath: /api/v1 +definitions: + models.AggregateData: + properties: + by_dinas: + additionalProperties: + type: integer + type: 
object + by_jenis: + additionalProperties: + type: integer + type: object + by_status: + additionalProperties: + type: integer + type: object + created_today: + type: integer + last_updated: + type: string + total_active: + type: integer + total_draft: + type: integer + total_inactive: + type: integer + updated_today: + type: integer + type: object + models.ErrorResponse: + properties: + code: + type: integer + error: + type: string + message: + type: string + timestamp: + type: string + type: object + models.ErrorResponseBpjs: + properties: + code: + type: string + errors: + additionalProperties: true + type: object + message: + type: string + request_id: + type: string + status: + type: string + type: object + models.LoginRequest: + properties: + password: + type: string + username: + type: string + required: + - password + - username + type: object + models.MetaResponse: + properties: + current_page: + type: integer + has_next: + type: boolean + has_prev: + type: boolean + limit: + type: integer + offset: + type: integer + total: + type: integer + total_pages: + type: integer + type: object + models.NullableInt32: + properties: + int32: + type: integer + valid: + type: boolean + type: object + models.NullableString: + properties: + string: + type: string + valid: + type: boolean + type: object + models.NullableTime: + properties: + time: + type: string + valid: + type: boolean + type: object + models.TokenResponse: + properties: + access_token: + type: string + expires_in: + type: integer + token_type: + type: string + type: object + models.User: + properties: + email: + type: string + id: + type: string + role: + type: string + username: + type: string + type: object + peserta.PesertaData: + properties: + cob: + properties: + nmAsuransi: {} + noAsuransi: {} + tglTAT: {} + tglTMT: {} + type: object + hakKelas: + properties: + keterangan: + type: string + kode: + type: string + type: object + informasi: + properties: + dinsos: {} + eSEP: {} + noSKTM: {} + 
prolanisPRB: + type: string + type: object + jenisPeserta: + properties: + keterangan: + type: string + kode: + type: string + type: object + mr: + properties: + noMR: + type: string + noTelepon: + type: string + type: object + nama: + type: string + nik: + type: string + noKartu: + type: string + pisa: + type: string + provUmum: + properties: + kdProvider: + type: string + nmProvider: + type: string + type: object + raw_response: + type: string + sex: + type: string + statusPeserta: + properties: + keterangan: + type: string + kode: + type: string + type: object + tglCetakKartu: + type: string + tglLahir: + type: string + tglTAT: + type: string + tglTMT: + type: string + umur: + properties: + umurSaatPelayanan: + type: string + umurSekarang: + type: string + type: object + type: object + peserta.PesertaResponse: + properties: + data: + $ref: '#/definitions/peserta.PesertaData' + message: + type: string + metaData: {} + request_id: + type: string + status: + type: string + timestamp: + type: string + type: object + retribusi.Retribusi: + properties: + date_created: + $ref: '#/definitions/models.NullableTime' + date_updated: + $ref: '#/definitions/models.NullableTime' + dinas: + $ref: '#/definitions/models.NullableString' + id: + type: string + jenis: + $ref: '#/definitions/models.NullableString' + kelompok_obyek: + $ref: '#/definitions/models.NullableString' + kode_tarif: + $ref: '#/definitions/models.NullableString' + pelayanan: + $ref: '#/definitions/models.NullableString' + rekening_denda: + $ref: '#/definitions/models.NullableString' + rekening_pokok: + $ref: '#/definitions/models.NullableString' + satuan: + $ref: '#/definitions/models.NullableString' + satuan_overtime: + $ref: '#/definitions/models.NullableString' + sort: + $ref: '#/definitions/models.NullableInt32' + status: + type: string + tarif: + $ref: '#/definitions/models.NullableString' + tarif_overtime: + $ref: '#/definitions/models.NullableString' + uraian_1: + $ref: 
'#/definitions/models.NullableString' + uraian_2: + $ref: '#/definitions/models.NullableString' + uraian_3: + $ref: '#/definitions/models.NullableString' + user_created: + $ref: '#/definitions/models.NullableString' + user_updated: + $ref: '#/definitions/models.NullableString' + type: object + retribusi.RetribusiCreateRequest: + properties: + dinas: + maxLength: 255 + minLength: 1 + type: string + jenis: + maxLength: 255 + minLength: 1 + type: string + kelompok_obyek: + maxLength: 255 + minLength: 1 + type: string + kode_tarif: + maxLength: 255 + minLength: 1 + type: string + pelayanan: + maxLength: 255 + minLength: 1 + type: string + rekening_denda: + maxLength: 255 + minLength: 1 + type: string + rekening_pokok: + maxLength: 255 + minLength: 1 + type: string + satuan: + maxLength: 255 + minLength: 1 + type: string + satuan_overtime: + maxLength: 255 + minLength: 1 + type: string + status: + enum: + - draft + - active + - inactive + type: string + tarif: + type: string + tarif_overtime: + type: string + uraian_1: + type: string + uraian_2: + type: string + uraian_3: + type: string + required: + - status + type: object + retribusi.RetribusiCreateResponse: + properties: + data: + $ref: '#/definitions/retribusi.Retribusi' + message: + type: string + type: object + retribusi.RetribusiDeleteResponse: + properties: + id: + type: string + message: + type: string + type: object + retribusi.RetribusiGetByIDResponse: + properties: + data: + $ref: '#/definitions/retribusi.Retribusi' + message: + type: string + type: object + retribusi.RetribusiGetResponse: + properties: + data: + items: + $ref: '#/definitions/retribusi.Retribusi' + type: array + message: + type: string + meta: + $ref: '#/definitions/models.MetaResponse' + summary: + $ref: '#/definitions/models.AggregateData' + type: object + retribusi.RetribusiUpdateRequest: + properties: + dinas: + maxLength: 255 + minLength: 1 + type: string + jenis: + maxLength: 255 + minLength: 1 + type: string + kelompok_obyek: + 
maxLength: 255 + minLength: 1 + type: string + kode_tarif: + maxLength: 255 + minLength: 1 + type: string + pelayanan: + maxLength: 255 + minLength: 1 + type: string + rekening_denda: + maxLength: 255 + minLength: 1 + type: string + rekening_pokok: + maxLength: 255 + minLength: 1 + type: string + satuan: + maxLength: 255 + minLength: 1 + type: string + satuan_overtime: + maxLength: 255 + minLength: 1 + type: string + status: + enum: + - draft + - active + - inactive + type: string + tarif: + type: string + tarif_overtime: + type: string + uraian_1: + type: string + uraian_2: + type: string + uraian_3: + type: string + required: + - status + type: object + retribusi.RetribusiUpdateResponse: + properties: + data: + $ref: '#/definitions/retribusi.Retribusi' + message: + type: string + type: object +host: localhost:8080 +info: + contact: + email: support@swagger.io + name: API Support + url: http://www.swagger.io/support + description: A comprehensive Go API service with Swagger documentation + license: + name: Apache 2.0 + url: http://www.apache.org/licenses/LICENSE-2.0.html + termsOfService: http://swagger.io/terms/ + title: API Service + version: 1.0.0 +paths: + /Peserta/nik/:nik: + get: + consumes: + - application/json + description: Get participant eligibility information by NIK + parameters: + - description: Request ID for tracking + in: header + name: X-Request-ID + type: string + - description: nik + example: '"example_value"' + in: path + name: nik + required: true + type: string + produces: + - application/json + responses: + "200": + description: Successfully retrieved Bynik data + schema: + $ref: '#/definitions/peserta.PesertaResponse' + "400": + description: Bad request - invalid parameters + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + "401": + description: Unauthorized - invalid API credentials + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + "404": + description: Not found - Bynik not found + schema: + $ref: 
'#/definitions/models.ErrorResponseBpjs' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + security: + - ApiKeyAuth: [] + summary: Get Bynik data + tags: + - Peserta + /Peserta/nokartu/:nokartu: + get: + consumes: + - application/json + description: Get participant eligibility information by card number + parameters: + - description: Request ID for tracking + in: header + name: X-Request-ID + type: string + - description: nokartu + example: '"example_value"' + in: path + name: nokartu + required: true + type: string + produces: + - application/json + responses: + "200": + description: Successfully retrieved Bynokartu data + schema: + $ref: '#/definitions/peserta.PesertaResponse' + "400": + description: Bad request - invalid parameters + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + "401": + description: Unauthorized - invalid API credentials + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + "404": + description: Not found - Bynokartu not found + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponseBpjs' + security: + - ApiKeyAuth: [] + summary: Get Bynokartu data + tags: + - Peserta + /api/v1/auth/login: + post: + consumes: + - application/json + description: Authenticate user with username and password to receive JWT token + parameters: + - description: Login credentials + in: body + name: login + required: true + schema: + $ref: '#/definitions/models.LoginRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + summary: Login user and get JWT token + tags: + - Authentication + /api/v1/auth/me: + get: + 
description: Get information about the currently authenticated user + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.User' + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + security: + - Bearer: [] + summary: Get current user info + tags: + - Authentication + /api/v1/auth/refresh: + post: + consumes: + - application/json + description: Refresh the JWT token using a valid refresh token + parameters: + - description: Refresh token + in: body + name: refresh + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + summary: Refresh JWT token + tags: + - Authentication + /api/v1/auth/register: + post: + consumes: + - application/json + description: Register a new user account + parameters: + - description: Registration data + in: body + name: register + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "201": + description: Created + schema: + additionalProperties: + type: string + type: object + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + summary: Register new user + tags: + - Authentication + /api/v1/retribusi/{id}: + delete: + consumes: + - application/json + description: Soft deletes a retribusi by setting status to 'deleted' + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: Retribusi deleted successfully + schema: + $ref: 
'#/definitions/retribusi.RetribusiDeleteResponse' + "400": + description: Invalid ID format + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Delete retribusi + tags: + - Retribusi + get: + consumes: + - application/json + description: Returns a single retribusi by ID + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetByIDResponse' + "400": + description: Invalid ID format + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get Retribusi by ID + tags: + - Retribusi + put: + consumes: + - application/json + description: Updates an existing retribusi record + parameters: + - description: Retribusi ID (UUID) + in: path + name: id + required: true + type: string + - description: Retribusi update request + in: body + name: request + required: true + schema: + $ref: '#/definitions/retribusi.RetribusiUpdateRequest' + produces: + - application/json + responses: + "200": + description: Retribusi updated successfully + schema: + $ref: '#/definitions/retribusi.RetribusiUpdateResponse' + "400": + description: Bad request or validation error + schema: + $ref: '#/definitions/models.ErrorResponse' + "404": + description: Retribusi not found + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Update retribusi + tags: + - Retribusi + /api/v1/retribusis: + 
get: + consumes: + - application/json + description: Returns a paginated list of retribusis with optional summary statistics + parameters: + - default: 10 + description: Limit (max 100) + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer + - default: false + description: Include aggregation summary + in: query + name: include_summary + type: boolean + - description: Filter by status + in: query + name: status + type: string + - description: Filter by jenis + in: query + name: jenis + type: string + - description: Filter by dinas + in: query + name: dinas + type: string + - description: Search in multiple fields + in: query + name: search + type: string + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetResponse' + "400": + description: Bad request + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi with pagination and optional aggregation + tags: + - Retribusi + post: + consumes: + - application/json + description: Creates a new retribusi record + parameters: + - description: Retribusi creation request + in: body + name: request + required: true + schema: + $ref: '#/definitions/retribusi.RetribusiCreateRequest' + produces: + - application/json + responses: + "201": + description: Retribusi created successfully + schema: + $ref: '#/definitions/retribusi.RetribusiCreateResponse' + "400": + description: Bad request or validation error + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Create retribusi + tags: + - Retribusi + /api/v1/retribusis/dynamic: + get: + consumes: + - application/json + description: Returns retribusis with advanced dynamic filtering like Directus 
+ parameters: + - description: Fields to select (e.g., fields=*.*) + in: query + name: fields + type: string + - description: Dynamic filters (e.g., filter[Jenis][_eq]=value) + in: query + name: filter[column][operator] + type: string + - description: Sort fields (e.g., sort=date_created,-Jenis) + in: query + name: sort + type: string + - default: 10 + description: Limit + in: query + name: limit + type: integer + - default: 0 + description: Offset + in: query + name: offset + type: integer + produces: + - application/json + responses: + "200": + description: Success response + schema: + $ref: '#/definitions/retribusi.RetribusiGetResponse' + "400": + description: Bad request + schema: + $ref: '#/definitions/models.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi with dynamic filtering + tags: + - Retribusi + /api/v1/retribusis/stats: + get: + consumes: + - application/json + description: Returns comprehensive statistics about retribusi data + parameters: + - description: Filter statistics by status + in: query + name: status + type: string + produces: + - application/json + responses: + "200": + description: Statistics data + schema: + $ref: '#/definitions/models.AggregateData' + "500": + description: Internal server error + schema: + $ref: '#/definitions/models.ErrorResponse' + summary: Get retribusi statistics + tags: + - Retribusi + /api/v1/token/generate: + post: + consumes: + - application/json + description: Generate a JWT token for a user + parameters: + - description: User credentials + in: body + name: token + required: true + schema: + $ref: '#/definitions/models.LoginRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + "401": + description: Unauthorized + schema: + 
additionalProperties: + type: string + type: object + summary: Generate JWT token + tags: + - Token + /api/v1/token/generate-direct: + post: + consumes: + - application/json + description: Generate a JWT token directly without password verification (for + testing) + parameters: + - description: User info + in: body + name: user + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/models.TokenResponse' + "400": + description: Bad request + schema: + additionalProperties: + type: string + type: object + summary: Generate token directly + tags: + - Token +schemes: +- http +- https +swagger: "2.0" diff --git a/example.env b/example.env new file mode 100644 index 0000000..6c3222f --- /dev/null +++ b/example.env @@ -0,0 +1,92 @@ +# Server Configuration +PORT=8080 +GIN_MODE=debug + +# Default Database Configuration (PostgreSQL) +DB_CONNECTION=postgres +DB_USERNAME=stim +DB_PASSWORD=stim*RS54 +DB_HOST=10.10.123.165 +DB_DATABASE=satu_db +DB_PORT=5000 +DB_SSLMODE=disable + +# satudata Database Configuration (PostgreSQL) +# POSTGRES_CONNECTION=postgres +# POSTGRES_USERNAME=stim +# POSTGRES_PASSWORD=stim*RS54 +# POSTGRES_HOST=10.10.123.165 +# POSTGRES_DATABASE=satu_db +# POSTGRES_NAME=satu_db +# POSTGRES_PORT=5000 +# POSTGRES_SSLMODE=disable + + +POSTGRES_SATUDATA_CONNECTION=postgres +POSTGRES_SATUDATA_USERNAME=stim +POSTGRES_SATUDATA_PASSWORD=stim*RS54 +POSTGRES_SATUDATA_HOST=10.10.123.165 +POSTGRES_SATUDATA_DATABASE=satu_db +POSTGRES_SATUDATA_PORT=5000 +POSTGRES_SATUDATA_SSLMODE=disable + +# Mongo Database +MONGODB_MONGOHL7_CONNECTION=mongodb +MONGODB_MONGOHL7_HOST=10.10.123.206 +MONGODB_MONGOHL7_PORT=27017 +MONGODB_MONGOHL7_USER=admin +MONGODB_MONGOHL7_PASS=stim*rs54 +MONGODB_MONGOHL7_MASTER=master +MONGODB_MONGOHL7_LOCAL=local +MONGODB_MONGOHL7_SSLMODE=disable + +# MYSQL Antrian Database +MYSQL_ANTRIAN_CONNECTION=mysql 
+MYSQL_ANTRIAN_HOST=10.10.123.163
+MYSQL_ANTRIAN_USERNAME=www-data
+MYSQL_ANTRIAN_PASSWORD=www-data
+MYSQL_ANTRIAN_DATABASE=antrian_rssa
+MYSQL_ANTRIAN_PORT=3306
+MYSQL_ANTRIAN_SSLMODE=disable
+
+
+MYSQL_MEDICAL_CONNECTION=mysql
+MYSQL_MEDICAL_HOST=10.10.123.147
+MYSQL_MEDICAL_USERNAME=meninjardev
+MYSQL_MEDICAL_PASSWORD=meninjar*RS54
+MYSQL_MEDICAL_DATABASE=healtcare_database
+MYSQL_MEDICAL_PORT=3306
+MYSQL_MEDICAL_SSLMODE=disable
+
+# Keycloak Configuration (optional)
+KEYCLOAK_ISSUER=https://auth.rssa.top/realms/sandbox
+KEYCLOAK_AUDIENCE=nuxtsim-pendaftaran
+KEYCLOAK_JWKS_URL=https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs
+KEYCLOAK_ENABLED=true
+
+# BPJS Configuration
+BPJS_BASEURL=https://apijkn.bpjs-kesehatan.go.id/vclaim-rest
+BPJS_CONSID=5257
+BPJS_USERKEY=4cf1cbef8c008440bbe9ef9ba789e482
+BPJS_SECRETKEY=1bV363512D
+
+BRIDGING_SATUSEHAT_ORG_ID=100026555
+BRIDGING_SATUSEHAT_FASYAKES_ID=3573011
+BRIDGING_SATUSEHAT_CLIENT_ID=l1ZgJGW6K5pnrqGUikWM7fgIoquA2AQ5UUG0U8WqHaq2VEyZ
+BRIDGING_SATUSEHAT_CLIENT_SECRET=Al3PTYAW6axPiAFwaFlpn8qShLFW5YGMgG8w1qhexgCc7lGTEjjcR6zxa06ThPDy
+BRIDGING_SATUSEHAT_AUTH_URL=https://api-satusehat.kemkes.go.id/oauth2/v1
+BRIDGING_SATUSEHAT_BASE_URL=https://api-satusehat.kemkes.go.id/fhir-r4/v1
+BRIDGING_SATUSEHAT_CONSENT_URL=https://api-satusehat.dto.kemkes.go.id/consent/v1
+BRIDGING_SATUSEHAT_KFA_URL=https://api-satusehat.kemkes.go.id/kfa-v2
+
+SWAGGER_TITLE=My Custom API Service
+SWAGGER_DESCRIPTION=This is a custom API service for managing various resources
+SWAGGER_VERSION=2.0.0
+SWAGGER_CONTACT_NAME=STIM IT Support
+SWAGGER_HOST=api.mycompany.com:8080
+SWAGGER_BASE_PATH=/api/v2
+SWAGGER_SCHEMES=https
+
+API_TITLE=API Service UJICOBA
+API_DESCRIPTION=Swagger API documentation
+API_VERSION=3.0.0 diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..5dd4cb6 --- /dev/null +++ b/go.mod @@ -0,0 +1,90 @@ +module api-service
+
+go 1.24.4
+
+require (
+ github.com/gin-gonic/gin v1.10.1
+ 
github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/google/uuid v1.6.0 + github.com/gorilla/websocket v1.5.1 + github.com/jackc/pgx/v5 v5.7.2 // Ensure pgx is a direct dependency + go.mongodb.org/mongo-driver v1.17.3 + golang.org/x/crypto v0.41.0 + golang.org/x/sync v0.16.0 + gorm.io/driver/mysql v1.6.0 // GORM MySQL driver + gorm.io/driver/postgres v1.5.11 // Added GORM PostgreSQL driver + gorm.io/driver/sqlserver v1.6.1 // GORM SQL Server driver +) + +require ( + github.com/daku10/go-lz-string v0.0.6 + github.com/go-playground/validator/v10 v10.27.0 + github.com/go-sql-driver/mysql v1.8.1 + github.com/joho/godotenv v1.5.1 + github.com/lib/pq v1.10.9 + github.com/mashingan/smapping v0.1.19 + github.com/rs/zerolog v1.34.0 + github.com/swaggo/files v1.0.1 + github.com/swaggo/gin-swagger v1.6.0 + github.com/swaggo/swag v1.16.6 + github.com/tidwall/gjson v1.18.0 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + filippo.io/edwards25519 v1.1.0 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/bytedance/sonic v1.14.0 // indirect + github.com/bytedance/sonic/loader v0.3.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/gabriel-vasile/mimetype v1.4.9 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/swag v0.19.15 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect + github.com/golang-sql/sqlexp v0.1.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + 
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.6 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/microsoft/go-mssqldb v1.8.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.0 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.0 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + golang.org/x/arch v0.20.0 // indirect + golang.org/x/mod v0.26.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect + golang.org/x/tools v0.35.0 // indirect + google.golang.org/protobuf v1.36.7 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gorm.io/gorm v1.30.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..7ecaabb --- /dev/null +++ b/go.sum @@ -0,0 +1,361 @@ +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod 
h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 h1:E+OJmp2tPvt1W+amx48v1eqbjDYsgN+RzP4q16yV5eM= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1/go.mod h1:a6xsAQUZg+VsS3TJ05SRp524Hs4pZ/AeFSr5ENf0Yjo= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 h1:U2rTu3Ef+7w9FHKIAXM6ZyqF3UOWJZ12zIm8zECAFfg= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 h1:jBQA3cKT4L2rWMpgE7Yt3Hwh2aUj8KXjIGLxjHeYNNo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1 h1:MyVTgWR8qd/Jw1Le0NZebGBUCLbtak3bJ3z1OlqZBpw= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= 
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= +github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= +github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= +github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/daku10/go-lz-string v0.0.6 h1:aO8FFp4QPuNp7+WNyh1DyNjGF3UbZu95tUv9xOZNsYQ= +github.com/daku10/go-lz-string v0.0.6/go.mod h1:Vk++rSG3db8HXJaHEAbxiy/ukjTmPBw/iI+SrVZDzfs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/dnaeon/go-vcr 
v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= +github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= +github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= 
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= +github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= 
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= +github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI= +github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= 
+github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod 
h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mashingan/smapping v0.1.19 h1:SsEtuPn2UcM1croIupPtGLgWgpYRuS0rSQMvKD9g2BQ= +github.com/mashingan/smapping v0.1.19/go.mod h1:FjfiwFxGOuNxL/OT1WcrNAwTPx0YJeg5JiXwBB1nyig= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod 
h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/microsoft/go-mssqldb v1.8.2 h1:236sewazvC8FvG6Dr3bszrVhMkAl4KYImryLkRMCd0I= +github.com/microsoft/go-mssqldb v1.8.2/go.mod h1:vp38dT33FGfVotRiTmDo3bFyaHq+p3LektQrjTULowo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= 
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= +github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M= +github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo= +github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI= +github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= +github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= 
+github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= +go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= +golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +golang.org/x/mod 
v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.22.0/go.mod 
h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.11.0/go.mod 
h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod 
h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= +gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314= +gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI= +gorm.io/driver/sqlserver v1.6.1 h1:XWISFsu2I2pqd1KJhhTZNJMx1jNQ+zVL/Q8ovDcUjtY= +gorm.io/driver/sqlserver v1.6.1/go.mod h1:VZeNn7hqX1aXoN5TPAFGWvxWG90xtA8erGn2gQmpc6U= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..f34deb4 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,739 @@ +package config + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "fmt" + "log" + "os" + "strconv" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) + +type Config struct { + Server ServerConfig + Databases map[string]DatabaseConfig + ReadReplicas map[string][]DatabaseConfig // For read replicas + Keycloak KeycloakConfig + Bpjs BpjsConfig + SatuSehat SatuSehatConfig + Swagger SwaggerConfig + Validator *validator.Validate +} + +type SwaggerConfig struct { + Title string + Description string + Version string + TermsOfService string + ContactName string + ContactURL string + ContactEmail string + LicenseName string + LicenseURL string + Host string + BasePath string + Schemes []string +} + +type 
ServerConfig struct { + Port int + Mode string +} + +type DatabaseConfig struct { + Name string + Type string // postgres, mysql, sqlserver, sqlite, mongodb + Host string + Port int + Username string + Password string + Database string + Schema string + SSLMode string + Path string // For SQLite + Options string // Additional connection options + MaxOpenConns int // Max open connections + MaxIdleConns int // Max idle connections + ConnMaxLifetime time.Duration // Connection max lifetime +} + +type KeycloakConfig struct { + Issuer string + Audience string + JwksURL string + Enabled bool +} + +type BpjsConfig struct { + BaseURL string `json:"base_url"` + ConsID string `json:"cons_id"` + UserKey string `json:"user_key"` + SecretKey string `json:"secret_key"` + Timeout time.Duration `json:"timeout"` +} + +type SatuSehatConfig struct { + OrgID string `json:"org_id"` + FasyakesID string `json:"fasyakes_id"` + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + AuthURL string `json:"auth_url"` + BaseURL string `json:"base_url"` + ConsentURL string `json:"consent_url"` + KFAURL string `json:"kfa_url"` + Timeout time.Duration `json:"timeout"` +} + +// SetHeader generates required headers for BPJS VClaim API +// func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { +// timenow := time.Now().UTC() +// t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z") +// if err != nil { +// log.Fatal(err) +// } + +// tstamp := timenow.Unix() - t.Unix() +// secret := []byte(cfg.SecretKey) +// message := []byte(cfg.ConsID + "&" + fmt.Sprint(tstamp)) +// hash := hmac.New(sha256.New, secret) +// hash.Write(message) + +// // to lowercase hexits +// hex.EncodeToString(hash.Sum(nil)) +// // to base64 +// xSignature := base64.StdEncoding.EncodeToString(hash.Sum(nil)) + +// return cfg.ConsID, cfg.SecretKey, cfg.UserKey, fmt.Sprint(tstamp), xSignature +// } +func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { + 
	timenow := time.Now().UTC()
	// NOTE(review): this parses the Unix epoch only to subtract it below;
	// t.Unix() is 0, so tstamp == timenow.Unix() and the error branch is
	// unreachable (the constant string always parses). Candidate for
	// simplification to `tstamp := time.Now().Unix()`.
	t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z")
	if err != nil {
		log.Fatal(err)
	}

	// Seconds since the Unix epoch; returned as the timestamp header value.
	tstamp := timenow.Unix() - t.Unix()
	// Signature material: "<consID>&<timestamp>" keyed with the secret key.
	secret := []byte(cfg.SecretKey)
	message := []byte(cfg.ConsID + "&" + fmt.Sprint(tstamp))
	hash := hmac.New(sha256.New, secret)
	hash.Write(message)

	// NOTE(review): dead code — the hex encoding is computed and its result
	// discarded (staticcheck SA4017 would flag this); only the base64 form
	// below is actually returned.
	hex.EncodeToString(hash.Sum(nil))
	// Base64 of the HMAC-SHA256 digest is the signature callers send.
	xSignature := base64.StdEncoding.EncodeToString(hash.Sum(nil))

	// NOTE(review): the raw secret key is returned as the second value;
	// verify callers never place it in an outgoing header or a log line.
	return cfg.ConsID, cfg.SecretKey, cfg.UserKey, fmt.Sprint(tstamp), xSignature
}

// ConfigBpjs is the legacy snake_case BPJS credential struct, kept only so
// pre-BpjsConfig callers keep compiling.
type ConfigBpjs struct {
	Cons_id    string
	Secret_key string
	User_key   string
}

// SetHeader adapts the legacy struct to BpjsConfig.SetHeader for backward
// compatibility; it returns the same five values in the same order.
func (cfg ConfigBpjs) SetHeader() (string, string, string, string, string) {
	bpjsConfig := BpjsConfig{
		ConsID:    cfg.Cons_id,
		SecretKey: cfg.Secret_key,
		UserKey:   cfg.User_key,
	}
	return bpjsConfig.SetHeader()
}

// LoadConfig builds the application Config entirely from environment
// variables, applying the defaults shown inline, then loads database and
// read-replica definitions from the environment as well.
func LoadConfig() *Config {
	config := &Config{
		Server: ServerConfig{
			Port: getEnvAsInt("PORT", 8080),
			Mode: getEnv("GIN_MODE", "debug"),
		},
		Databases:    make(map[string]DatabaseConfig),
		ReadReplicas: make(map[string][]DatabaseConfig),
		Keycloak: KeycloakConfig{
			Issuer:   getEnv("KEYCLOAK_ISSUER", "https://keycloak.example.com/auth/realms/yourrealm"),
			Audience: getEnv("KEYCLOAK_AUDIENCE", "your-client-id"),
			JwksURL:  getEnv("KEYCLOAK_JWKS_URL", "https://keycloak.example.com/auth/realms/yourrealm/protocol/openid-connect/certs"),
			Enabled:  getEnvAsBool("KEYCLOAK_ENABLED", true),
		},
		Bpjs: BpjsConfig{
			BaseURL:   getEnv("BPJS_BASEURL", "https://apijkn.bpjs-kesehatan.go.id"),
			ConsID:    getEnv("BPJS_CONSID", ""),
			UserKey:   getEnv("BPJS_USERKEY", ""),
			SecretKey: getEnv("BPJS_SECRETKEY", ""),
			Timeout:   parseDuration(getEnv("BPJS_TIMEOUT", "30s")),
		},
		SatuSehat: SatuSehatConfig{
			OrgID:        getEnv("BRIDGING_SATUSEHAT_ORG_ID", ""),
			FasyakesID:   getEnv("BRIDGING_SATUSEHAT_FASYAKES_ID", ""),
			ClientID:     getEnv("BRIDGING_SATUSEHAT_CLIENT_ID", ""),
			ClientSecret:
getEnv("BRIDGING_SATUSEHAT_CLIENT_SECRET", ""), + AuthURL: getEnv("BRIDGING_SATUSEHAT_AUTH_URL", "https://api-satusehat.kemkes.go.id/oauth2/v1"), + BaseURL: getEnv("BRIDGING_SATUSEHAT_BASE_URL", "https://api-satusehat.kemkes.go.id/fhir-r4/v1"), + ConsentURL: getEnv("BRIDGING_SATUSEHAT_CONSENT_URL", "https://api-satusehat.dto.kemkes.go.id/consent/v1"), + KFAURL: getEnv("BRIDGING_SATUSEHAT_KFA_URL", "https://api-satusehat.kemkes.go.id/kfa-v2"), + Timeout: parseDuration(getEnv("BRIDGING_SATUSEHAT_TIMEOUT", "30s")), + }, + Swagger: SwaggerConfig{ + Title: getEnv("SWAGGER_TITLE", "SERVICE API"), + Description: getEnv("SWAGGER_DESCRIPTION", "CUSTUM SERVICE API"), + Version: getEnv("SWAGGER_VERSION", "1.0.0"), + TermsOfService: getEnv("SWAGGER_TERMS_OF_SERVICE", "http://swagger.io/terms/"), + ContactName: getEnv("SWAGGER_CONTACT_NAME", "API Support"), + ContactURL: getEnv("SWAGGER_CONTACT_URL", "http://rssa.example.com/support"), + ContactEmail: getEnv("SWAGGER_CONTACT_EMAIL", "support@swagger.io"), + LicenseName: getEnv("SWAGGER_LICENSE_NAME", "Apache 2.0"), + LicenseURL: getEnv("SWAGGER_LICENSE_URL", "http://www.apache.org/licenses/LICENSE-2.0.html"), + Host: getEnv("SWAGGER_HOST", "localhost:8080"), + BasePath: getEnv("SWAGGER_BASE_PATH", "/api/v1"), + Schemes: parseSchemes(getEnv("SWAGGER_SCHEMES", "http,https")), + }, + } + + // Initialize validator + config.Validator = validator.New() + + // Load database configurations + config.loadDatabaseConfigs() + + // Load read replica configurations + config.loadReadReplicaConfigs() + + return config +} + +func (c *Config) loadDatabaseConfigs() { + // Simplified approach: Directly load from environment variables + // This ensures we get the exact values specified in .env + + // Primary database configuration + c.Databases["default"] = DatabaseConfig{ + Name: "default", + Type: getEnv("DB_CONNECTION", "postgres"), + Host: getEnv("DB_HOST", "localhost"), + Port: getEnvAsInt("DB_PORT", 5432), + Username: getEnv("DB_USERNAME", 
""), + Password: getEnv("DB_PASSWORD", ""), + Database: getEnv("DB_DATABASE", "satu_db"), + Schema: getEnv("DB_SCHEMA", "public"), + SSLMode: getEnv("DB_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("DB_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("DB_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("DB_CONN_MAX_LIFETIME", "5m")), + } + + // SATUDATA database configuration + c.addPostgreSQLConfigs() + + // MongoDB database configuration + c.addMongoDBConfigs() + + // Legacy support for backward compatibility + envVars := os.Environ() + dbConfigs := make(map[string]map[string]string) + + // Parse database configurations from environment variables + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + value := parts[1] + + // Parse specific database configurations + if strings.HasSuffix(key, "_CONNECTION") || strings.HasSuffix(key, "_HOST") || + strings.HasSuffix(key, "_DATABASE") || strings.HasSuffix(key, "_USERNAME") || + strings.HasSuffix(key, "_PASSWORD") || strings.HasSuffix(key, "_PORT") || + strings.HasSuffix(key, "_NAME") { + + segments := strings.Split(key, "_") + if len(segments) >= 2 { + dbName := strings.ToLower(strings.Join(segments[:len(segments)-1], "_")) + property := strings.ToLower(segments[len(segments)-1]) + + if dbConfigs[dbName] == nil { + dbConfigs[dbName] = make(map[string]string) + } + dbConfigs[dbName][property] = value + } + } + } + + // Create DatabaseConfig from parsed configurations for additional databases + for name, config := range dbConfigs { + // Skip empty configurations or system configurations + if name == "" || strings.Contains(name, "chrome_crashpad_pipe") || name == "primary" { + continue + } + + dbConfig := DatabaseConfig{ + Name: name, + Type: getEnvFromMap(config, "connection", getEnvFromMap(config, "type", "postgres")), + Host: getEnvFromMap(config, "host", "localhost"), + Port: getEnvAsIntFromMap(config, "port", 5432), 
+ Username: getEnvFromMap(config, "username", ""), + Password: getEnvFromMap(config, "password", ""), + Database: getEnvFromMap(config, "database", getEnvFromMap(config, "name", name)), + Schema: getEnvFromMap(config, "schema", "public"), + SSLMode: getEnvFromMap(config, "sslmode", "disable"), + Path: getEnvFromMap(config, "path", ""), + Options: getEnvFromMap(config, "options", ""), + MaxOpenConns: getEnvAsIntFromMap(config, "max_open_conns", 25), + MaxIdleConns: getEnvAsIntFromMap(config, "max_idle_conns", 25), + ConnMaxLifetime: parseDuration(getEnvFromMap(config, "conn_max_lifetime", "5m")), + } + + // Skip if username is empty and it's not a system config + if dbConfig.Username == "" && !strings.HasPrefix(name, "chrome") { + continue + } + + c.Databases[name] = dbConfig + } +} + +func (c *Config) loadReadReplicaConfigs() { + envVars := os.Environ() + + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + value := parts[1] + + // Parse read replica configurations (format: [DBNAME]_REPLICA_[INDEX]_[PROPERTY]) + if strings.Contains(key, "_REPLICA_") { + segments := strings.Split(key, "_") + if len(segments) >= 5 && strings.ToUpper(segments[2]) == "REPLICA" { + dbName := strings.ToLower(segments[1]) + replicaIndex := segments[3] + property := strings.ToLower(strings.Join(segments[4:], "_")) + + replicaKey := dbName + "_replica_" + replicaIndex + + if c.ReadReplicas[dbName] == nil { + c.ReadReplicas[dbName] = []DatabaseConfig{} + } + + // Find or create replica config + var replicaConfig *DatabaseConfig + for i := range c.ReadReplicas[dbName] { + if c.ReadReplicas[dbName][i].Name == replicaKey { + replicaConfig = &c.ReadReplicas[dbName][i] + break + } + } + + if replicaConfig == nil { + // Create new replica config + newConfig := DatabaseConfig{ + Name: replicaKey, + Type: c.Databases[dbName].Type, + Host: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_HOST", 
c.Databases[dbName].Host), + Port: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PORT", c.Databases[dbName].Port), + Username: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_USERNAME", c.Databases[dbName].Username), + Password: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PASSWORD", c.Databases[dbName].Password), + Database: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_DATABASE", c.Databases[dbName].Database), + Schema: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SCHEMA", c.Databases[dbName].Schema), + SSLMode: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSLMODE", c.Databases[dbName].SSLMode), + MaxOpenConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_OPEN_CONNS", c.Databases[dbName].MaxOpenConns), + MaxIdleConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_IDLE_CONNS", c.Databases[dbName].MaxIdleConns), + ConnMaxLifetime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_CONN_MAX_LIFETIME", "5m")), + } + c.ReadReplicas[dbName] = append(c.ReadReplicas[dbName], newConfig) + replicaConfig = &c.ReadReplicas[dbName][len(c.ReadReplicas[dbName])-1] + } + + // Update the specific replica + switch property { + case "host": + replicaConfig.Host = value + case "port": + replicaConfig.Port = getEnvAsInt(key, 5432) + case "username": + replicaConfig.Username = value + case "password": + replicaConfig.Password = value + case "database": + replicaConfig.Database = value + case "schema": + replicaConfig.Schema = value + case "sslmode": + replicaConfig.SSLMode = value + case "max_open_conns": + replicaConfig.MaxOpenConns = getEnvAsInt(key, 25) + case "max_idle_conns": + replicaConfig.MaxIdleConns = getEnvAsInt(key, 25) + case "conn_max_lifetime": + replicaConfig.ConnMaxLifetime = parseDuration(value) + } + } + } + } +} + +func (c *Config) addSpecificDatabase(prefix, defaultType 
string) { + connection := getEnv(strings.ToUpper(prefix)+"_CONNECTION", defaultType) + host := getEnv(strings.ToUpper(prefix)+"_HOST", "") + if host != "" { + dbConfig := DatabaseConfig{ + Name: prefix, + Type: connection, + Host: host, + Port: getEnvAsInt(strings.ToUpper(prefix)+"_PORT", 5432), + Username: getEnv(strings.ToUpper(prefix)+"_USERNAME", ""), + Password: getEnv(strings.ToUpper(prefix)+"_PASSWORD", ""), + Database: getEnv(strings.ToUpper(prefix)+"_DATABASE", getEnv(strings.ToUpper(prefix)+"_NAME", prefix)), + Schema: getEnv(strings.ToUpper(prefix)+"_SCHEMA", "public"), + SSLMode: getEnv(strings.ToUpper(prefix)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv(strings.ToUpper(prefix)+"_CONN_MAX_LIFETIME", "5m")), + } + c.Databases[prefix] = dbConfig + } +} + +// PostgreSQL database +func (c *Config) addPostgreSQLConfigs() { + // SATUDATA database configuration + // defaultPOSTGRESHost := getEnv("POSTGRES_HOST", "localhost") + // if defaultPOSTGRESHost != "" { + // c.Databases["postgres"] = DatabaseConfig{ + // Name: "postgres", + // Type: getEnv("POSTGRES_CONNECTION", "postgres"), + // Host: defaultPOSTGRESHost, + // Port: getEnvAsInt("POSTGRES_PORT", 5432), + // Username: getEnv("POSTGRES_USERNAME", ""), + // Password: getEnv("POSTGRES_PASSWORD", ""), + // Database: getEnv("POSTGRES_DATABASE", "postgres"), + // Schema: getEnv("POSTGRES_SCHEMA", "public"), + // SSLMode: getEnv("POSTGRES_SSLMODE", "disable"), + // MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25), + // MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25), + // ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")), + // } + // } + + // Support for custom PostgreSQL configurations with POSTGRES_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, 
"=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse PostgreSQL configurations (format: POSTGRES_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "POSTGRES_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + + // Skip if it's a standard PostgreSQL configuration + if dbName == "connection" || dbName == "dev" || dbName == "default" || dbName == "satudata" { + continue + } + + // Create or update PostgreSQL configuration + if _, exists := c.Databases[dbName]; !exists { + c.Databases[dbName] = DatabaseConfig{ + Name: dbName, + Type: "postgres", + Host: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_HOST", "localhost"), + Port: getEnvAsInt("POSTGRES_"+strings.ToUpper(dbName)+"_PORT", 5432), + Username: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_USERNAME", ""), + Password: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_PASSWORD", ""), + Database: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_DATABASE", dbName), + Schema: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SCHEMA", "public"), + SSLMode: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")), + } + } + } + } + } +} + +// addMYSQLConfigs adds MYSQL database +func (c *Config) addMySQLConfigs() { + // Primary MySQL configuration + defaultMySQLHost := getEnv("MYSQL_HOST", "") + if defaultMySQLHost != "" { + c.Databases["mysql"] = DatabaseConfig{ + Name: "mysql", + Type: getEnv("MYSQL_CONNECTION", "mysql"), + Host: defaultMySQLHost, + Port: getEnvAsInt("MYSQL_PORT", 3306), + Username: getEnv("MYSQL_USERNAME", ""), + Password: getEnv("MYSQL_PASSWORD", ""), + Database: getEnv("MYSQL_DATABASE", "mysql"), + SSLMode: getEnv("MYSQL_SSLMODE", "disable"), + MaxOpenConns: 
getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), + } + } + + // Support for custom MySQL configurations with MYSQL_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse MySQL configurations (format: MYSQL_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "MYSQL_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + + // Skip if it's a standard MySQL configuration + if dbName == "connection" || dbName == "dev" || dbName == "max" || dbName == "conn" { + continue + } + + // Create or update MySQL configuration + if _, exists := c.Databases[dbName]; !exists { + mysqlHost := getEnv("MYSQL_"+strings.ToUpper(dbName)+"_HOST", "") + if mysqlHost != "" { + c.Databases[dbName] = DatabaseConfig{ + Name: dbName, + Type: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_CONNECTION", "mysql"), + Host: mysqlHost, + Port: getEnvAsInt("MYSQL_"+strings.ToUpper(dbName)+"_PORT", 3306), + Username: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_USERNAME", ""), + Password: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_PASSWORD", ""), + Database: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_DATABASE", dbName), + SSLMode: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), + MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), + ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), + } + } + } + } + } + } +} + +// addMongoDBConfigs adds MongoDB database configurations from environment variables +func (c *Config) addMongoDBConfigs() { + // Primary MongoDB configuration + mongoHost := getEnv("MONGODB_HOST", "") + if mongoHost != "" { + c.Databases["mongodb"] = 
DatabaseConfig{ + Name: "mongodb", + Type: getEnv("MONGODB_CONNECTION", "mongodb"), + Host: mongoHost, + Port: getEnvAsInt("MONGODB_PORT", 27017), + Username: getEnv("MONGODB_USER", ""), + Password: getEnv("MONGODB_PASS", ""), + Database: getEnv("MONGODB_MASTER", "master"), + SSLMode: getEnv("MONGODB_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), + MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), + ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), + } + } + + // Additional MongoDB configurations for local database + mongoLocalHost := getEnv("MONGODB_LOCAL_HOST", "") + if mongoLocalHost != "" { + c.Databases["mongodb_local"] = DatabaseConfig{ + Name: "mongodb_local", + Type: getEnv("MONGODB_CONNECTION", "mongodb"), + Host: mongoLocalHost, + Port: getEnvAsInt("MONGODB_LOCAL_PORT", 27017), + Username: getEnv("MONGODB_LOCAL_USER", ""), + Password: getEnv("MONGODB_LOCAL_PASS", ""), + Database: getEnv("MONGODB_LOCAL_DB", "local"), + SSLMode: getEnv("MONGOD_SSLMODE", "disable"), + MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), + MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), + ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), + } + } + + // Support for custom MongoDB configurations with MONGODB_ prefix + envVars := os.Environ() + for _, envVar := range envVars { + parts := strings.SplitN(envVar, "=", 2) + if len(parts) != 2 { + continue + } + + key := parts[0] + // Parse MongoDB configurations (format: MONGODB_[NAME]_[PROPERTY]) + if strings.HasPrefix(key, "MONGODB_") && strings.Contains(key, "_") { + segments := strings.Split(key, "_") + if len(segments) >= 3 { + dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_")) + // Skip if it's a standard MongoDB configuration + if dbName == "connection" || dbName == "dev" || dbName == "local" { + continue + } + + // Create or update MongoDB configuration + if _, exists := c.Databases[dbName]; 
!exists {
				c.Databases[dbName] = DatabaseConfig{
					Name:     dbName,
					Type:     "mongodb",
					Host:     getEnv("MONGODB_"+strings.ToUpper(dbName)+"_HOST", "localhost"),
					Port:     getEnvAsInt("MONGODB_"+strings.ToUpper(dbName)+"_PORT", 27017),
					Username: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_USER", ""),
					Password: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_PASS", ""),
					Database: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_DB", dbName),
					// NOTE(review): reads "MONGOD_SSLMODE" while the primary MongoDB
					// block reads "MONGODB_SSLMODE" — looks like a typo; confirm which
					// variable deployments actually set before renaming it.
					SSLMode:         getEnv("MONGOD_SSLMODE", "disable"),
					MaxOpenConns:    getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100),
					MaxIdleConns:    getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10),
					ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")),
				}
			}
		}
	}
}

// getEnvFromMap returns config[key] if present, otherwise defaultValue.
func getEnvFromMap(config map[string]string, key, defaultValue string) string {
	if value, exists := config[key]; exists {
		return value
	}
	return defaultValue
}

// getEnvAsIntFromMap returns config[key] parsed as an int; defaultValue is
// used when the key is absent or not a valid integer.
func getEnvAsIntFromMap(config map[string]string, key string, defaultValue int) int {
	if value, exists := config[key]; exists {
		if intValue, err := strconv.Atoi(value); err == nil {
			return intValue
		}
	}
	return defaultValue
}

// parseDuration parses durationStr with time.ParseDuration, silently falling
// back to 5 minutes on any parse error.
func parseDuration(durationStr string) time.Duration {
	if duration, err := time.ParseDuration(durationStr); err == nil {
		return duration
	}
	return 5 * time.Minute
}

// getEnv returns the value of the environment variable key, or defaultValue
// when the variable is unset or set to the empty string (os.Getenv cannot
// distinguish the two cases).
func getEnv(key, defaultValue string) string {
	if value := os.Getenv(key); value != "" {
		return value
	}
	return defaultValue
}

// getEnvAsInt returns the environment variable key parsed as an int, or
// defaultValue when it is unset or not a valid integer.
func getEnvAsInt(key string, defaultValue int) int {
	valueStr := getEnv(key, "")
	if value, err := strconv.Atoi(valueStr); err == nil {
		return value
	}
	return defaultValue
}

// getEnvAsBool returns the environment variable key parsed with
// strconv.ParseBool, or defaultValue when it is unset or unparsable.
func getEnvAsBool(key string, defaultValue bool) bool {
	valueStr := getEnv(key, "")
	if value, err := strconv.ParseBool(valueStr); err == nil {
		return value
	}
	return defaultValue
}

// parseSchemes splits a comma-separated schemes string (e.g. "http,https")
// into a trimmed slice, defaulting to {"http"} for empty input.
func parseSchemes(schemesStr string) []string {
	if schemesStr == "" {
		return []string{"http"}
	}

	schemes := strings.Split(schemesStr, ",")
	for i, scheme := range schemes {
		schemes[i] = strings.TrimSpace(scheme)
	}
	return schemes
}

// Validate checks that every loaded database plus the BPJS, Keycloak and
// SatuSehat settings required at runtime are present.
//
// Fixed: the original called log.Fatal on every failure, so the declared
// error return could never be non-nil and the process died inside a
// validation routine. It now returns a descriptive error on the first
// missing value and lets the caller decide whether to terminate.
func (c *Config) Validate() error {
	if len(c.Databases) == 0 {
		return fmt.Errorf("at least one database configuration is required")
	}

	// Map iteration order is random, so which incomplete database is
	// reported first is non-deterministic — same as the original behavior.
	for name, db := range c.Databases {
		switch {
		case db.Host == "":
			return fmt.Errorf("database host is required for %s", name)
		case db.Username == "":
			return fmt.Errorf("database username is required for %s", name)
		case db.Password == "":
			return fmt.Errorf("database password is required for %s", name)
		case db.Database == "":
			return fmt.Errorf("database name is required for %s", name)
		}
	}

	if c.Bpjs.BaseURL == "" {
		return fmt.Errorf("BPJS base URL is required")
	}
	if c.Bpjs.ConsID == "" {
		return fmt.Errorf("BPJS consumer ID is required")
	}
	if c.Bpjs.UserKey == "" {
		return fmt.Errorf("BPJS user key is required")
	}
	if c.Bpjs.SecretKey == "" {
		return fmt.Errorf("BPJS secret key is required")
	}

	// Keycloak settings are only mandatory when the integration is enabled.
	if c.Keycloak.Enabled {
		if c.Keycloak.Issuer == "" {
			return fmt.Errorf("Keycloak issuer is required when Keycloak is enabled")
		}
		if c.Keycloak.Audience == "" {
			return fmt.Errorf("Keycloak audience is required when Keycloak is enabled")
		}
		if c.Keycloak.JwksURL == "" {
			return fmt.Errorf("Keycloak JWKS URL is required when Keycloak is enabled")
		}
	}

	if c.SatuSehat.OrgID == "" {
		return fmt.Errorf("SatuSehat organization ID is required")
	}
	if c.SatuSehat.FasyakesID == "" {
		return fmt.Errorf("SatuSehat Fasyankes ID is required")
	}
	if c.SatuSehat.ClientID == "" {
		return fmt.Errorf("SatuSehat client ID is required")
	}
	if c.SatuSehat.ClientSecret == "" {
		return fmt.Errorf("SatuSehat client secret is required")
	}
	if c.SatuSehat.AuthURL == "" {
		return fmt.Errorf("SatuSehat auth URL is required")
	}
	if c.SatuSehat.BaseURL == "" {
		return fmt.Errorf("SatuSehat base URL is required")
	}

	return nil
}
diff --git a/internal/database/database.go
b/internal/database/database.go new file mode 100644 index 0000000..b7f5b4f --- /dev/null +++ b/internal/database/database.go @@ -0,0 +1,699 @@ +package database + +import ( + "context" + "database/sql" + "fmt" + "log" // Import runtime package + + // Import debug package + "strconv" + "sync" + "time" + + "api-service/internal/config" + + _ "github.com/jackc/pgx/v5" // Import pgx driver + "github.com/lib/pq" + _ "gorm.io/driver/postgres" // Import GORM PostgreSQL driver + + _ "github.com/go-sql-driver/mysql" // MySQL driver for database/sql + _ "gorm.io/driver/mysql" // GORM MySQL driver + _ "gorm.io/driver/sqlserver" // GORM SQL Server driver + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +// DatabaseType represents supported database types +type DatabaseType string + +const ( + Postgres DatabaseType = "postgres" + MySQL DatabaseType = "mysql" + SQLServer DatabaseType = "sqlserver" + SQLite DatabaseType = "sqlite" + MongoDB DatabaseType = "mongodb" +) + +// Service represents a service that interacts with multiple databases +type Service interface { + Health() map[string]map[string]string + GetDB(name string) (*sql.DB, error) + GetMongoClient(name string) (*mongo.Client, error) + GetReadDB(name string) (*sql.DB, error) // For read replicas + Close() error + ListDBs() []string + GetDBType(name string) (DatabaseType, error) + // Tambahkan method untuk WebSocket notifications + ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error + NotifyChange(dbName, channel, payload string) error + GetPrimaryDB(name string) (*sql.DB, error) // Helper untuk get primary DB +} + +type service struct { + sqlDatabases map[string]*sql.DB + mongoClients map[string]*mongo.Client + readReplicas map[string][]*sql.DB // Read replicas for load balancing + configs map[string]config.DatabaseConfig + readConfigs map[string][]config.DatabaseConfig + mu sync.RWMutex + readBalancer map[string]int // 
Round-robin counter for read replicas
	listeners    map[string]*pq.Listener // active pq LISTEN connections, keyed per listen session
	listenersMu  sync.RWMutex
}

// Package-level singleton state guarded by sync.Once: New always hands back
// the same *service instance for the lifetime of the process.
var (
	dbManager *service
	once      sync.Once
)

// New creates a new database service with multiple connections.
//
// It is a process-wide singleton: only the first call's cfg is used
// (once.Do never re-runs), and every later call returns the already-built
// instance, silently ignoring its cfg argument. Individual connection
// failures are logged and swallowed, so the returned Service may be missing
// some of the configured databases/replicas.
func New(cfg *config.Config) Service {
	once.Do(func() {
		dbManager = &service{
			sqlDatabases: make(map[string]*sql.DB),
			mongoClients: make(map[string]*mongo.Client),
			readReplicas: make(map[string][]*sql.DB),
			configs:      make(map[string]config.DatabaseConfig),
			readConfigs:  make(map[string][]config.DatabaseConfig),
			readBalancer: make(map[string]int),
			listeners:    make(map[string]*pq.Listener),
		}

		log.Println("Initializing database service...") // Log when the initialization starts
		// log.Printf("Current Goroutine ID: %d", runtime.NumGoroutine()) // Log the number of goroutines
		// log.Printf("Stack Trace: %s", debug.Stack()) // Log the stack trace
		dbManager.loadFromConfig(cfg)

		// Initialize all primary databases; a failed connection is logged
		// but does not abort the remaining ones.
		for name, dbConfig := range dbManager.configs {
			if err := dbManager.addDatabase(name, dbConfig); err != nil {
				log.Printf("Failed to connect to database %s: %v", name, err)
			}
		}

		// Initialize read replicas with the same best-effort policy.
		for name, replicaConfigs := range dbManager.readConfigs {
			for i, replicaConfig := range replicaConfigs {
				if err := dbManager.addReadReplica(name, i, replicaConfig); err != nil {
					log.Printf("Failed to connect to read replica %s[%d]: %v", name, i, err)
				}
			}
		}
	})

	return dbManager
}

// loadFromConfig copies the primary and read-replica database configs from
// cfg into the service's internal maps under the write lock.
func (s *service) loadFromConfig(cfg *config.Config) {
	s.mu.Lock()
	defer s.mu.Unlock()

	// Load primary databases
	for name, dbConfig := range cfg.Databases {
		s.configs[name] = dbConfig
	}

	// Load read replicas
	for name, replicaConfigs := range cfg.ReadReplicas {
		s.readConfigs[name] = replicaConfigs
	}
}

// addDatabase opens and registers the primary connection for the named
// database config, dispatching on config.Type; MongoDB is delegated to
// addMongoDB. Holds the write lock for the entire connection attempt.
func (s *service) addDatabase(name string, config config.DatabaseConfig) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	log.Printf("=== Database
Connection Debug ===") + // log.Printf("Database: %s", name) + // log.Printf("Type: %s", config.Type) + // log.Printf("Host: %s", config.Host) + // log.Printf("Port: %d", config.Port) + // log.Printf("Database: %s", config.Database) + // log.Printf("Username: %s", config.Username) + // log.Printf("SSLMode: %s", config.SSLMode) + + var db *sql.DB + var err error + + dbType := DatabaseType(config.Type) + + switch dbType { + case Postgres: + db, err = s.openPostgresConnection(config) + case MySQL: + db, err = s.openMySQLConnection(config) + case SQLServer: + db, err = s.openSQLServerConnection(config) + case SQLite: + db, err = s.openSQLiteConnection(config) + case MongoDB: + return s.addMongoDB(name, config) + default: + return fmt.Errorf("unsupported database type: %s", config.Type) + } + + if err != nil { + log.Printf("โŒ Error connecting to database %s: %v", name, err) + log.Printf(" Database: %s@%s:%d/%s", config.Username, config.Host, config.Port, config.Database) + return err + } + + log.Printf("โœ… Successfully connected to database: %s", name) + return s.configureSQLDB(name, db, config.MaxOpenConns, config.MaxIdleConns, config.ConnMaxLifetime) +} + +func (s *service) addReadReplica(name string, index int, config config.DatabaseConfig) error { + s.mu.Lock() + defer s.mu.Unlock() + + var db *sql.DB + var err error + + dbType := DatabaseType(config.Type) + + switch dbType { + case Postgres: + db, err = s.openPostgresConnection(config) + case MySQL: + db, err = s.openMySQLConnection(config) + case SQLServer: + db, err = s.openSQLServerConnection(config) + case SQLite: + db, err = s.openSQLiteConnection(config) + default: + return fmt.Errorf("unsupported database type for read replica: %s", config.Type) + } + + if err != nil { + return err + } + + if s.readReplicas[name] == nil { + s.readReplicas[name] = make([]*sql.DB, 0) + } + + // Ensure we have enough slots + for len(s.readReplicas[name]) <= index { + s.readReplicas[name] = append(s.readReplicas[name], nil) + 
} + + s.readReplicas[name][index] = db + log.Printf("Successfully connected to read replica %s[%d]", name, index) + + return nil +} + +func (s *service) openPostgresConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + config.SSLMode, + ) + + if config.Schema != "" { + connStr += "&search_path=" + config.Schema + } + + db, err := sql.Open("pgx", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open PostgreSQL connection: %w", err) + } + + return db, nil +} + +func (s *service) openMySQLConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?parseTime=true", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + ) + + db, err := sql.Open("mysql", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open MySQL connection: %w", err) + } + + return db, nil +} + +func (s *service) openSQLServerConnection(config config.DatabaseConfig) (*sql.DB, error) { + connStr := fmt.Sprintf("sqlserver://%s:%s@%s:%d?database=%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + ) + + db, err := sql.Open("sqlserver", connStr) + if err != nil { + return nil, fmt.Errorf("failed to open SQL Server connection: %w", err) + } + + return db, nil +} + +func (s *service) openSQLiteConnection(config config.DatabaseConfig) (*sql.DB, error) { + dbPath := config.Path + if dbPath == "" { + dbPath = fmt.Sprintf("./data/%s.db", config.Database) + } + + db, err := sql.Open("sqlite3", dbPath) + if err != nil { + return nil, fmt.Errorf("failed to open SQLite connection: %w", err) + } + + return db, nil +} + +func (s *service) addMongoDB(name string, config config.DatabaseConfig) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + uri := 
fmt.Sprintf("mongodb://%s:%s@%s:%d/%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + ) + + client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri)) + if err != nil { + return fmt.Errorf("failed to connect to MongoDB: %w", err) + } + + s.mongoClients[name] = client + log.Printf("Successfully connected to MongoDB: %s", name) + + return nil +} + +func (s *service) configureSQLDB(name string, db *sql.DB, maxOpenConns, maxIdleConns int, connMaxLifetime time.Duration) error { + db.SetMaxOpenConns(maxOpenConns) + db.SetMaxIdleConns(maxIdleConns) + db.SetConnMaxLifetime(connMaxLifetime) + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + if err := db.PingContext(ctx); err != nil { + db.Close() + return fmt.Errorf("failed to ping database: %w", err) + } + + s.sqlDatabases[name] = db + log.Printf("Successfully connected to SQL database: %s", name) + + return nil +} + +// Health checks the health of all database connections by pinging each database. 
+func (s *service) Health() map[string]map[string]string { + s.mu.RLock() + defer s.mu.RUnlock() + + result := make(map[string]map[string]string) + + // Check SQL databases + for name, db := range s.sqlDatabases { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + stats := make(map[string]string) + + err := db.PingContext(ctx) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("db down: %v", err) + stats["type"] = "sql" + stats["role"] = "primary" + result[name] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "It's healthy" + stats["type"] = "sql" + stats["role"] = "primary" + + dbStats := db.Stats() + stats["open_connections"] = strconv.Itoa(dbStats.OpenConnections) + stats["in_use"] = strconv.Itoa(dbStats.InUse) + stats["idle"] = strconv.Itoa(dbStats.Idle) + stats["wait_count"] = strconv.FormatInt(dbStats.WaitCount, 10) + stats["wait_duration"] = dbStats.WaitDuration.String() + stats["max_idle_closed"] = strconv.FormatInt(dbStats.MaxIdleClosed, 10) + stats["max_lifetime_closed"] = strconv.FormatInt(dbStats.MaxLifetimeClosed, 10) + + if dbStats.OpenConnections > 40 { + stats["message"] = "The database is experiencing heavy load." + } + + if dbStats.WaitCount > 1000 { + stats["message"] = "The database has a high number of wait events, indicating potential bottlenecks." + } + + if dbStats.MaxIdleClosed > int64(dbStats.OpenConnections)/2 { + stats["message"] = "Many idle connections are being closed, consider revising the connection pool settings." + } + + if dbStats.MaxLifetimeClosed > int64(dbStats.OpenConnections)/2 { + stats["message"] = "Many connections are being closed due to max lifetime, consider increasing max lifetime or revising the connection usage pattern." 
+ } + + result[name] = stats + } + + // Check read replicas + for name, replicas := range s.readReplicas { + for i, db := range replicas { + if db == nil { + continue + } + + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + replicaName := fmt.Sprintf("%s_replica_%d", name, i) + stats := make(map[string]string) + + err := db.PingContext(ctx) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("read replica down: %v", err) + stats["type"] = "sql" + stats["role"] = "replica" + result[replicaName] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "Read replica healthy" + stats["type"] = "sql" + stats["role"] = "replica" + + dbStats := db.Stats() + stats["open_connections"] = strconv.Itoa(dbStats.OpenConnections) + stats["in_use"] = strconv.Itoa(dbStats.InUse) + stats["idle"] = strconv.Itoa(dbStats.Idle) + + result[replicaName] = stats + } + } + + // Check MongoDB connections + for name, client := range s.mongoClients { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + stats := make(map[string]string) + + err := client.Ping(ctx, nil) + if err != nil { + stats["status"] = "down" + stats["error"] = fmt.Sprintf("mongodb down: %v", err) + stats["type"] = "mongodb" + result[name] = stats + continue + } + + stats["status"] = "up" + stats["message"] = "It's healthy" + stats["type"] = "mongodb" + + result[name] = stats + } + + return result +} + +// GetDB returns a specific SQL database connection by name +func (s *service) GetDB(name string) (*sql.DB, error) { + log.Printf("Attempting to get database connection for: %s", name) + s.mu.RLock() + defer s.mu.RUnlock() + + db, exists := s.sqlDatabases[name] + if !exists { + log.Printf("Error: database %s not found", name) // Log the error + return nil, fmt.Errorf("database %s not found", name) + } + + log.Printf("Current connection pool state for %s: Open: %d, In Use: %d, Idle: %d", + name, 
db.Stats().OpenConnections, db.Stats().InUse, db.Stats().Idle) + s.mu.RLock() + defer s.mu.RUnlock() + + // db, exists := s.sqlDatabases[name] + // if !exists { + // log.Printf("Error: database %s not found", name) // Log the error + // return nil, fmt.Errorf("database %s not found", name) + // } + + return db, nil +} + +// GetReadDB returns a read replica connection using round-robin load balancing +func (s *service) GetReadDB(name string) (*sql.DB, error) { + s.mu.RLock() + defer s.mu.RUnlock() + + replicas, exists := s.readReplicas[name] + if !exists || len(replicas) == 0 { + // Fallback to primary if no replicas available + return s.GetDB(name) + } + + // Round-robin load balancing + s.readBalancer[name] = (s.readBalancer[name] + 1) % len(replicas) + selected := replicas[s.readBalancer[name]] + + if selected == nil { + // Fallback to primary if replica is nil + return s.GetDB(name) + } + + return selected, nil +} + +// GetMongoClient returns a specific MongoDB client by name +func (s *service) GetMongoClient(name string) (*mongo.Client, error) { + s.mu.RLock() + defer s.mu.RUnlock() + + client, exists := s.mongoClients[name] + if !exists { + return nil, fmt.Errorf("MongoDB client %s not found", name) + } + + return client, nil +} + +// ListDBs returns list of available database names +func (s *service) ListDBs() []string { + s.mu.RLock() + defer s.mu.RUnlock() + + names := make([]string, 0, len(s.sqlDatabases)+len(s.mongoClients)) + + for name := range s.sqlDatabases { + names = append(names, name) + } + + for name := range s.mongoClients { + names = append(names, name) + } + + return names +} + +// GetDBType returns the type of a specific database +func (s *service) GetDBType(name string) (DatabaseType, error) { + s.mu.RLock() + defer s.mu.RUnlock() + + config, exists := s.configs[name] + if !exists { + return "", fmt.Errorf("database %s not found", name) + } + + return DatabaseType(config.Type), nil +} + +// Close closes all database connections +func (s 
*service) Close() error { + s.mu.Lock() + defer s.mu.Unlock() + + var errs []error + + for name, db := range s.sqlDatabases { + if err := db.Close(); err != nil { + errs = append(errs, fmt.Errorf("failed to close database %s: %w", name, err)) + } else { + log.Printf("Disconnected from SQL database: %s", name) + } + } + + for name, replicas := range s.readReplicas { + for i, db := range replicas { + if db != nil { + if err := db.Close(); err != nil { + errs = append(errs, fmt.Errorf("failed to close read replica %s[%d]: %w", name, i, err)) + } else { + log.Printf("Disconnected from read replica: %s[%d]", name, i) + } + } + } + } + + for name, client := range s.mongoClients { + if err := client.Disconnect(context.Background()); err != nil { + errs = append(errs, fmt.Errorf("failed to disconnect MongoDB client %s: %w", name, err)) + } else { + log.Printf("Disconnected from MongoDB: %s", name) + } + } + + s.sqlDatabases = make(map[string]*sql.DB) + s.mongoClients = make(map[string]*mongo.Client) + s.readReplicas = make(map[string][]*sql.DB) + s.configs = make(map[string]config.DatabaseConfig) + s.readConfigs = make(map[string][]config.DatabaseConfig) + + if len(errs) > 0 { + return fmt.Errorf("errors closing databases: %v", errs) + } + + return nil +} + +// GetPrimaryDB returns primary database connection +func (s *service) GetPrimaryDB(name string) (*sql.DB, error) { + return s.GetDB(name) +} + +// ListenForChanges implements PostgreSQL LISTEN/NOTIFY for real-time updates +func (s *service) ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error { + s.mu.RLock() + config, exists := s.configs[dbName] + s.mu.RUnlock() + + if !exists { + return fmt.Errorf("database %s not found", dbName) + } + + // Only support PostgreSQL for LISTEN/NOTIFY + if DatabaseType(config.Type) != Postgres { + return fmt.Errorf("LISTEN/NOTIFY only supported for PostgreSQL databases") + } + + // Create connection string for listener + connStr 
:= fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s", + config.Username, + config.Password, + config.Host, + config.Port, + config.Database, + config.SSLMode, + ) + + // Create listener + listener := pq.NewListener( + connStr, + 10*time.Second, + time.Minute, + func(ev pq.ListenerEventType, err error) { + if err != nil { + log.Printf("Database listener (%s) error: %v", dbName, err) + } + }, + ) + + // Store listener for cleanup + s.listenersMu.Lock() + s.listeners[dbName] = listener + s.listenersMu.Unlock() + + // Listen to specified channels + for _, channel := range channels { + err := listener.Listen(channel) + if err != nil { + listener.Close() + return fmt.Errorf("failed to listen to channel %s: %w", channel, err) + } + log.Printf("Listening to database channel: %s on %s", channel, dbName) + } + + // Start listening loop + go func() { + defer func() { + listener.Close() + s.listenersMu.Lock() + delete(s.listeners, dbName) + s.listenersMu.Unlock() + log.Printf("Database listener for %s stopped", dbName) + }() + + for { + select { + case n := <-listener.Notify: + if n != nil { + callback(n.Channel, n.Extra) + } + case <-ctx.Done(): + return + case <-time.After(90 * time.Second): + // Send ping to keep connection alive + go func() { + if err := listener.Ping(); err != nil { + log.Printf("Listener ping failed for %s: %v", dbName, err) + } + }() + } + } + }() + + return nil +} + +// NotifyChange sends a notification to a PostgreSQL channel +func (s *service) NotifyChange(dbName, channel, payload string) error { + db, err := s.GetDB(dbName) + if err != nil { + return fmt.Errorf("failed to get database %s: %w", dbName, err) + } + + // Check if it's PostgreSQL + s.mu.RLock() + config, exists := s.configs[dbName] + s.mu.RUnlock() + + if !exists { + return fmt.Errorf("database %s configuration not found", dbName) + } + + if DatabaseType(config.Type) != Postgres { + return fmt.Errorf("NOTIFY only supported for PostgreSQL databases") + } + + // Execute NOTIFY + query := 
"SELECT pg_notify($1, $2)" + _, err = db.Exec(query, channel, payload) + if err != nil { + return fmt.Errorf("failed to send notification: %w", err) + } + + log.Printf("Sent notification to channel %s on %s: %s", channel, dbName, payload) + return nil +} diff --git a/internal/handlers/auth/auth.go b/internal/handlers/auth/auth.go new file mode 100644 index 0000000..3bd74dd --- /dev/null +++ b/internal/handlers/auth/auth.go @@ -0,0 +1,132 @@ +package handlers + +import ( + models "api-service/internal/models/auth" + services "api-service/internal/services/auth" + "net/http" + + "github.com/gin-gonic/gin" +) + +// AuthHandler handles authentication endpoints +type AuthHandler struct { + authService *services.AuthService +} + +// NewAuthHandler creates a new authentication handler +func NewAuthHandler(authService *services.AuthService) *AuthHandler { + return &AuthHandler{ + authService: authService, + } +} + +// Login godoc +// @Summary Login user and get JWT token +// @Description Authenticate user with username and password to receive JWT token +// @Tags Authentication +// @Accept json +// @Produce json +// @Param login body models.LoginRequest true "Login credentials" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/auth/login [post] +func (h *AuthHandler) Login(c *gin.Context) { + var loginReq models.LoginRequest + + // Bind JSON request + if err := c.ShouldBindJSON(&loginReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Authenticate user + tokenResponse, err := h.authService.Login(loginReq.Username, loginReq.Password) + if err != nil { + c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} + +// RefreshToken godoc +// @Summary Refresh JWT token +// @Description Refresh the JWT token using a valid refresh token +// 
@Tags Authentication +// @Accept json +// @Produce json +// @Param refresh body map[string]string true "Refresh token" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/auth/refresh [post] +func (h *AuthHandler) RefreshToken(c *gin.Context) { + // For now, this is a placeholder for refresh token functionality + // In a real implementation, you would handle refresh tokens here + c.JSON(http.StatusNotImplemented, gin.H{"error": "refresh token not implemented"}) +} + +// Register godoc +// @Summary Register new user +// @Description Register a new user account +// @Tags Authentication +// @Accept json +// @Produce json +// @Param register body map[string]string true "Registration data" +// @Success 201 {object} map[string]string +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/auth/register [post] +func (h *AuthHandler) Register(c *gin.Context) { + var registerReq struct { + Username string `json:"username" binding:"required"` + Email string `json:"email" binding:"required,email"` + Password string `json:"password" binding:"required,min=6"` + Role string `json:"role" binding:"required"` + } + + if err := c.ShouldBindJSON(®isterReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + err := h.authService.RegisterUser( + registerReq.Username, + registerReq.Email, + registerReq.Password, + registerReq.Role, + ) + + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusCreated, gin.H{"message": "user registered successfully"}) +} + +// Me godoc +// @Summary Get current user info +// @Description Get information about the currently authenticated user +// @Tags Authentication +// @Produce json +// @Security Bearer +// @Success 200 {object} models.User +// @Failure 401 {object} map[string]string "Unauthorized" +// 
@Router /api/v1/auth/me [get] +func (h *AuthHandler) Me(c *gin.Context) { + // Get user info from context (set by middleware) + userID, exists := c.Get("user_id") + if !exists { + c.JSON(http.StatusUnauthorized, gin.H{"error": "user not authenticated"}) + return + } + + // In a real implementation, you would fetch user details from database + c.JSON(http.StatusOK, gin.H{ + "id": userID, + "username": c.GetString("username"), + "email": c.GetString("email"), + "role": c.GetString("role"), + }) +} diff --git a/internal/handlers/auth/token.go b/internal/handlers/auth/token.go new file mode 100644 index 0000000..02383c7 --- /dev/null +++ b/internal/handlers/auth/token.go @@ -0,0 +1,95 @@ +package handlers + +import ( + models "api-service/internal/models/auth" + services "api-service/internal/services/auth" + "net/http" + + "github.com/gin-gonic/gin" +) + +// TokenHandler handles token generation endpoints +type TokenHandler struct { + authService *services.AuthService +} + +// NewTokenHandler creates a new token handler +func NewTokenHandler(authService *services.AuthService) *TokenHandler { + return &TokenHandler{ + authService: authService, + } +} + +// GenerateToken godoc +// @Summary Generate JWT token +// @Description Generate a JWT token for a user +// @Tags Token +// @Accept json +// @Produce json +// @Param token body models.LoginRequest true "User credentials" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Failure 401 {object} map[string]string "Unauthorized" +// @Router /api/v1/token/generate [post] +func (h *TokenHandler) GenerateToken(c *gin.Context) { + var loginReq models.LoginRequest + + // Bind JSON request + if err := c.ShouldBindJSON(&loginReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Generate token + tokenResponse, err := h.authService.Login(loginReq.Username, loginReq.Password) + if err != nil { + c.JSON(http.StatusUnauthorized, 
gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} + +// GenerateTokenDirect godoc +// @Summary Generate token directly +// @Description Generate a JWT token directly without password verification (for testing) +// @Tags Token +// @Accept json +// @Produce json +// @Param user body map[string]string true "User info" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/token/generate-direct [post] +func (h *TokenHandler) GenerateTokenDirect(c *gin.Context) { + var req struct { + Username string `json:"username" binding:"required"` + Email string `json:"email" binding:"required"` + Role string `json:"role" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Create a temporary user for token generation + user := &models.User{ + ID: "temp-" + req.Username, + Username: req.Username, + Email: req.Email, + Role: req.Role, + } + + // Generate token directly + token, err := h.authService.GenerateTokenForUser(user) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, models.TokenResponse{ + AccessToken: token, + TokenType: "Bearer", + ExpiresIn: 3600, + }) +} diff --git a/internal/handlers/healthcheck/healthcheck.go b/internal/handlers/healthcheck/healthcheck.go new file mode 100644 index 0000000..d109bff --- /dev/null +++ b/internal/handlers/healthcheck/healthcheck.go @@ -0,0 +1,24 @@ +package healthcheck + +import ( + "api-service/internal/database" + "net/http" + + "github.com/gin-gonic/gin" +) + +// HealthCheckHandler handles health check requests +type HealthCheckHandler struct { + dbService database.Service +} + +// NewHealthCheckHandler creates a new HealthCheckHandler +func NewHealthCheckHandler(dbService database.Service) *HealthCheckHandler { + return &HealthCheckHandler{dbService: 
dbService} +} + +// CheckHealth checks the health of the application +func (h *HealthCheckHandler) CheckHealth(c *gin.Context) { + healthStatus := h.dbService.Health() // Call the health check function from the database service + c.JSON(http.StatusOK, healthStatus) +} diff --git a/internal/handlers/peserta/peserta.go b/internal/handlers/peserta/peserta.go new file mode 100644 index 0000000..8ba77e8 --- /dev/null +++ b/internal/handlers/peserta/peserta.go @@ -0,0 +1,605 @@ +// Package peserta handles Peserta BPJS services +// Generated on: 2025-09-07 11:01:18 +package handlers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "api-service/internal/config" + "api-service/internal/database" + "api-service/internal/models" + "api-service/internal/models/vclaim/peserta" + services "api-service/internal/services/bpjs" + "api-service/pkg/logger" + + "github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + "github.com/google/uuid" +) + +// PesertaHandler handles Peserta BPJS services +type PesertaHandler struct { + service services.VClaimService + db database.Service + validator *validator.Validate + logger logger.Logger + config config.BpjsConfig +} + +// PesertaHandlerConfig contains configuration for PesertaHandler +type PesertaHandlerConfig struct { + Config *config.Config + Logger logger.Logger + Validator *validator.Validate +} + +// NewPesertaHandler creates a new PesertaHandler +func NewPesertaHandler(cfg PesertaHandlerConfig) *PesertaHandler { + return &PesertaHandler{ + db: database.New(cfg.Config), + service: services.NewService(cfg.Config.Bpjs), + validator: cfg.Validator, + logger: cfg.Logger, + config: cfg.Config.Bpjs, + } +} + +// min returns the minimum of two integers +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// cleanResponse removes invalid characters and BOM from the response string +func cleanResponse(resp string) string { + // Remove UTF-8 
BOM, strips forbidden characters, and filters the rest against an allow-list.
	// (BUG FIX: the original repeated the identical EF BB BF check four
	// times and constructed a fresh sync.Pool on every call, so nothing was
	// ever reused.)

	// 1. Strip leading UTF-8 BOM(s): U+FEFF encodes as EF BB BF.
	for strings.HasPrefix(resp, "\uFEFF") {
		resp = strings.TrimPrefix(resp, "\uFEFF")
	}

	// 2. Single pass over the runes:
	//    - drop ASCII control characters (NUL..US) except TAB/LF/CR;
	//    - drop the explicitly forbidden punctuation (backtick, accents,
	//      curly quotes, cent sign, U+0080);
	//    - keep remaining ASCII plus the non-ASCII allow-list below.
	b := respBuilderPool.Get().(*strings.Builder)
	defer func() {
		b.Reset()
		respBuilderPool.Put(b)
	}()
	b.Grow(len(resp))

	for _, r := range resp {
		switch {
		case r < 0x20 && r != '\t' && r != '\n' && r != '\r':
			// removed: ASCII control character
		case forbiddenRunes[r]:
			// removed: forbidden punctuation
		case r < 128 || allowedRunes[r]:
			b.WriteRune(r)
		}
	}

	return strings.TrimSpace(b.String())
}

// respBuilderPool reuses strings.Builder values across cleanResponse calls.
var respBuilderPool = sync.Pool{
	New: func() interface{} { return &strings.Builder{} },
}

// forbiddenRunes are removed outright even though some of them would pass the
// ASCII / allow-list filter (the original stripped them with ReplaceAll
// before filtering, so the conflicting allow-list entries never applied).
var forbiddenRunes = map[rune]bool{
	'ยข': true, '\u0080': true, '`': true, 'ยด': true,
	'โ€˜': true, 'โ€™': true, 'โ€œ': true, 'โ€': true,
}

// allowedRunes is the non-ASCII allow-list. ASCII entries from the original
// table were dropped because the filter already keeps every rune < 128.
var allowedRunes = map[rune]bool{
	// Latin-1 Supplement punctuation and symbols
	'ยก': true, 'ยข': true, 'ยฃ': true, 'ยค': true, 'ยฅ': true, 'ยฆ': true,
	'ยง': true, 'ยจ': true, 'ยฉ': true, 'ยช': true, 'ยซ': true, 'ยฌ': true,
	'ยฎ': true, 'ยฏ': true, 'ยฐ': true, 'ยฑ': true, 'ยฒ': true, 'ยณ': true,
	'ยด': true, 'ยต': true, 'ยถ': true, 'ยท': true, 'ยธ': true, 'ยน': true,
	'ยบ': true, 'ยป': true, 'ยผ': true, 'ยฝ': true, 'ยพ': true, 'ยฟ': true,

	// Latin letters with diacritics (lowercase)
	'รก': true, 'รฉ': true, 'รญ': true, 'รณ': true, 'รบ': true, 'รฝ': true, 'รพ': true,
	'ร ': true, 'รจ': true, 'รฌ': true, 'รฒ': true, 'รน': true,
	'รข': true, 'รช': true, 'รฎ': true, 'รด': true, 'รป': true,
	'รค': true, 'รซ': true, 'รฏ': true, 'รถ': true, 'รผ': true, 'รฟ': true,
	'รฃ': true, 'รต': true, 'รฑ': true, 'รง': true,
	'ฤ': true, 'ฤ“': true, 'ฤซ': true, 'ล': true, 'ลซ': true,
	'ฤƒ': true, 'ฤ‘': true, 'ฤง': true, 'ฤณ': true, 'ฤธ': true, 'ล‚': true,
	'ล‹': true, 'ล“': true, 'ลง': true, 'รŸ': true,

	// Latin letters with diacritics (uppercase)
	'ร': true, 'ร‰': true, 'ร': true, 'ร“': true, 'รš': true, 'ร': true, 'รž': true,
	'ร€': true, 'รˆ': true, 'รŒ': true, 'ร’': true, 'ร™': true,
	'ร‚': true, 'รŠ': true, 'รŽ': true, 'ร”': true, 'ร›': true,
	'ร„': true, 'ร‹': true, 'ร': true, 'ร–': true, 'รœ': true,
	'รƒ': true, 'ร•': true, 'ร‘': true, 'ร‡': true,
	'ฤ€': true, 'ฤ’': true, 'ฤช': true, 'ลŒ': true, 'ลช': true,
	'ฤ‚': true, 'ฤ': true, 'ฤฆ': true, 'ฤฒ': true, 'ล': true,
	'ลŠ': true, 'ล’': true, 'ลฆ': true, 'แบž': true,

	// Nordic and related letters
	'ร…': true, 'รฅ': true, 'ร†': true, 'รฆ': true, 'ร˜': true, 'รธ': true,
	'ลฟ': true, 'ล‰': true, 'ล€': true,

	// Punctuation, primes and general symbols
	'โ€': true, 'โ€“': true, 'โ€”': true, 'โ€•': true, 'โ€–': true, 'โ€—': true,
	'โ€ ': true, 'โ€ก': true, 'โ€ข': true, 'โ€ฃ': true, 'โ€ค': true, 'โ€ฅ': true,
	'โ€ฆ': true, 'โ€ง': true, 'โ€ฐ': true, 'โ€ฒ': true, 'โ€ณ': true, 'โ€ด': true,
	'โ€ต': true, 'โ€ถ': true, 'โ€ท': true, 'โ€ธ': true, 'โ€น': true, 'โ€บ': true,
	'โ€ป': true,

	// Common currency symbols
	'โ‚ฌ': true, 'โ‚น': true,
}

// extractCode pulls the "code" field out of metaData, which may be a struct
// with a Code field, a map[string]interface{}, or a JSON-encoded string.
// Returns nil when no code can be extracted.
func extractCode(metaData interface{}) interface{} {
	switch v := reflect.ValueOf(metaData); v.Kind() {
	case reflect.Struct:
		if f := v.FieldByName("Code"); f.IsValid() {
			return f.Interface()
		}
	case reflect.Map:
		if m, ok := metaData.(map[string]interface{}); ok {
			return m["code"]
		}
	case reflect.String:
		var m map[string]interface{}
		if err := json.Unmarshal([]byte(metaData.(string)), &m); err == nil {
			return m["code"]
		}
	}
	return nil
}

// parseHTTPStatusCode extracts the numeric status code following an
// "HTTP error:" marker in errMsg, defaulting to 500.
// (BUG FIX: the original indexed strings.Fields(...)[0] unconditionally and
// panicked when nothing followed the marker.)
func parseHTTPStatusCode(errMsg string) int {
	const marker = "HTTP error:"
	if i := strings.Index(errMsg, marker); i >= 0 {
		fields := strings.Fields(errMsg[i+len(marker):])
		if len(fields) > 0 {
			if statusCode, err := strconv.Atoi(fields[0]); err == nil {
				return statusCode
			}
		}
	}
	return 500 // default to internal server error
}

// isValidJSON reports whether str parses as JSON.
func (h *PesertaHandler) isValidJSON(str string) bool {
	var js interface{}
	return json.Unmarshal([]byte(str), &js) == nil
}

// GetBynik godoc
// @Summary Get Bynik data
// @Description Get participant eligibility information by NIK
// @Tags Peserta
// @Accept json
// @Produce json
// @Security ApiKeyAuth
// @Param X-Request-ID header string false "Request ID for tracking"
// @Param nik path string true "nik" example("example_value")
// @Success 200 {object} peserta.PesertaResponse "Successfully retrieved Bynik data"
// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters"
// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials"
// @Failure 404 {object} models.ErrorResponseBpjs "Not found - Bynik not found"
// @Failure 500 {object}
models.ErrorResponseBpjs "Internal server error"
// @Router /Peserta/nik/:nik [get]
func (h *PesertaHandler) GetBynik(c *gin.Context) {
	// Bound the whole upstream call chain to 30s.
	ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
	defer cancel()

	// Generate request ID if not present, and echo it back in the response header.
	requestID := c.GetHeader("X-Request-ID")
	if requestID == "" {
		requestID = uuid.New().String()
		c.Header("X-Request-ID", requestID)
	}

	// Get database connection
	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logger.Error("Database connection failed", map[string]interface{}{
			"error":      err.Error(),
			"request_id": requestID,
		})
		c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{
			Status:    "error",
			Message:   "Database connection failed",
			RequestID: requestID,
		})
		return
	}
	// Note: dbConn is available for future database operations (e.g., caching, logging)
	_ = dbConn // Prevent unused variable warning

	// Build today's date (tglSEP) for the upstream endpoint.
	// NOTE(review): the fmt.Println below looks like a debug leftover — confirm and remove.
	now := time.Now()
	dateStr := now.Format("2006-01-02")
	fmt.Println("Date (YYYY-MM-DD):", dateStr)
	h.logger.Info("Processing GetBynik request", map[string]interface{}{
		"request_id": requestID,
		"endpoint":   "/Peserta/nik/:nik/tglSEP/" + dateStr,
		"nik":        c.Param("nik"),
	})

	// Extract path parameters; ":nik" guards against an unmatched route parameter.
	nik := c.Param("nik")
	if nik == "" || nik == ":nik" {

		h.logger.Error("Missing required parameter nik", map[string]interface{}{
			"request_id": requestID,
		})

		c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{
			Status:    "error",
			Message:   "Parameter NIK Masih Kosong / Isi Dahulu NIK!",
			RequestID: requestID,
		})
		return
	}
	var response peserta.PesertaResponse

	// Substitute the NIK into the endpoint template before calling upstream.
	endpoint := "/Peserta/nik/:nik/tglSEP/" + dateStr

	endpoint = strings.Replace(endpoint, ":nik", nik, 1)

	resp, err := h.service.GetRawResponse(ctx, endpoint)

	if err != nil {
		// Check if error message contains 404 status code
		if strings.Contains(err.Error(), "HTTP error: 404") {
			h.logger.Error("Bynik not found", map[string]interface{}{
				"error":      err.Error(),
				"request_id": requestID,
			})

			c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{
				Status:    "error",
				Message:   "Bynik not found",
				RequestID: requestID,
			})
			return
		}

		h.logger.Error("Failed to get Bynik", map[string]interface{}{
			"error":      err.Error(),
			"request_id": requestID,
		})

		c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{
			Status:    "error",
			Message:   "Internal server error",
			RequestID: requestID,
		})
		return
	}

	// Map the raw response
	response.MetaData = resp.MetaData
	if resp.Response != nil {
		response.Data = &peserta.PesertaData{}
		if respStr, ok := resp.Response.(string); ok {
			// Decrypt the response string using consID+secretKey+tstamp as key material.
			consID, secretKey, _, tstamp, _ := h.config.SetHeader()
			decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp)
			if err != nil {

				h.logger.Error("Failed to decrypt response", map[string]interface{}{
					"error":      err.Error(),
					"request_id": requestID,
				})

			} else {
				// Clean the decrypted response of non-whitelisted runes before parsing.
				cleanedResp := cleanResponse(decryptedResp)
				if h.isValidJSON(cleanedResp) {
					// Re-unmarshal after cleaning.
					err = json.Unmarshal([]byte(cleanedResp), response.Data)
					if err != nil {
						h.logger.Warn("Failed to unmarshal decrypted response", map[string]interface{}{
							"error":            err.Error(),
							"request_id":       requestID,
							"response_preview": cleanedResp[:min(100, len(cleanedResp))], // Log first 100 chars for debugging
						})
						// Set Data to nil if unmarshal fails to avoid sending empty struct
						response.Data = nil
					}
				} else {
					// Not JSON: keep the raw cleaned text so the caller still sees something.
					h.logger.Warn("Invalid JSON in data, storing as string", map[string]interface{}{
						"request_id": requestID,
						"response":   cleanedResp,
					})
					response.Data.RawResponse = cleanedResp
				}

			}
		} else if respMap, ok := resp.Response.(map[string]interface{}); ok {
			// Response is already unmarshaled JSON
			if dataMap, exists := respMap["peserta"]; exists {
				dataBytes, _ := 
json.Marshal(dataMap)
				json.Unmarshal(dataBytes, response.Data)
			} else {
				// Try to unmarshal the whole response
				respBytes, _ := json.Marshal(resp.Response)
				json.Unmarshal(respBytes, response.Data)
			}
		}
	}

	// Ensure response has proper fields
	response.Status = "success"
	response.RequestID = requestID
	// Take the HTTP status code from metaData.code; fall back to 200 when absent.
	var statusCode int
	code := extractCode(response.MetaData)
	if code != nil {
		statusCode = models.GetStatusCodeFromMeta(code)
	} else {
		statusCode = 200
	}
	c.JSON(statusCode, response)
}

// GetBynokartu godoc
// @Summary Get Bynokartu data
// @Description Get participant eligibility information by card number
// @Tags Peserta
// @Accept json
// @Produce json
// @Security ApiKeyAuth
// @Param X-Request-ID header string false "Request ID for tracking"
// @Param nokartu path string true "nokartu" example("example_value")
// @Success 200 {object} peserta.PesertaResponse "Successfully retrieved Bynokartu data"
// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters"
// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials"
// @Failure 404 {object} models.ErrorResponseBpjs "Not found - Bynokartu not found"
// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error"
// @Router /Peserta/nokartu/:nokartu [get]
func (h *PesertaHandler) GetBynokartu(c *gin.Context) {
	// Bound the whole upstream call chain to 30s (same budget as GetBynik).
	ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
	defer cancel()

	// Generate request ID if not present
	requestID := c.GetHeader("X-Request-ID")
	if requestID == "" {
		requestID = uuid.New().String()
		c.Header("X-Request-ID", requestID)
	}

	// Get database connection
	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logger.Error("Database connection failed", map[string]interface{}{
			"error":      err.Error(),
			"request_id": requestID,
		})
		c.JSON(http.StatusInternalServerError, 
models.ErrorResponseBpjs{ + Status: "error", + Message: "Database connection failed", + RequestID: requestID, + }) + return + } + // Note: dbConn is available for future database operations (e.g., caching, logging) + _ = dbConn // Prevent unused variable warning + + // Context Paramaeter + now := time.Now() + dateStr := now.Format("2006-01-02") + fmt.Println("Date (YYYY-MM-DD):", dateStr) + h.logger.Info("Processing GetBynokartu request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "/Peserta/nokartu/:nokartu/tglSEP/" + dateStr, + "nik": c.Param("nokartu"), + }) + + // Extract path parameters + + nokartu := c.Param("nokartu") + if nokartu == "" || nokartu == ":nokartu" { + + h.logger.Error("Missing required parameter nokartu", map[string]interface{}{ + "request_id": requestID, + }) + + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Parameter Nomor Kartu Bpjs Masih Kosong / Isi Dahulu Nomor Kartu!", + RequestID: requestID, + }) + return + } + var response peserta.PesertaResponse + + endpoint := "/Peserta/nokartu/:nokartu/tglSEP/" + dateStr + + endpoint = strings.Replace(endpoint, ":nokartu", nokartu, 1) + + resp, err := h.service.GetRawResponse(ctx, endpoint) + + if err != nil { + // Check if error message contains 404 status code + if strings.Contains(err.Error(), "HTTP error: 404") { + h.logger.Error("ByNoKartu not found", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "ByNoKartu not found", + RequestID: requestID, + }) + return + } + + h.logger.Error("Failed to get ByNoKartu", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if 
resp.Response != nil { + response.Data = &peserta.PesertaData{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + + } else { + // Clean the decrypted response + cleanedResp := cleanResponse(decryptedResp) + err = json.Unmarshal([]byte(cleanedResp), response.Data) + if err != nil { + h.logger.Warn("Failed to unmarshal decrypted response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + "response_preview": cleanedResp[:min(100, len(cleanedResp))], // Log first 100 chars for debugging + }) + // Set Data to nil if unmarshal fails to avoid sending empty struct + response.Data = nil + } + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["peserta"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + // Ambil status code dari metaData.code + var statusCode int + code := extractCode(response.MetaData) + if code != nil { + statusCode = models.GetStatusCodeFromMeta(code) + } else { + statusCode = 200 + } + c.JSON(statusCode, response) +} diff --git a/internal/handlers/retribusi/retribusi.go b/internal/handlers/retribusi/retribusi.go new file mode 100644 index 0000000..b5e9a94 --- /dev/null +++ b/internal/handlers/retribusi/retribusi.go @@ -0,0 +1,1401 @@ +package handlers + +import ( + "api-service/internal/config" + 
"api-service/internal/database" + models "api-service/internal/models" + "api-service/internal/models/retribusi" + utils "api-service/internal/utils/filters" + "api-service/internal/utils/validation" + "api-service/pkg/logger" + "context" + "database/sql" + "fmt" + "net/http" + "strconv" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + "github.com/google/uuid" +) + +var ( + db database.Service + once sync.Once + validate *validator.Validate +) + +// Initialize the database connection and validator +func init() { + once.Do(func() { + db = database.New(config.LoadConfig()) + validate = validator.New() + + // Register custom validations if needed + validate.RegisterValidation("retribusi_status", validateRetribusiStatus) + + if db == nil { + logger.Fatal("Failed to initialize database connection") + } + }) +} + +// Custom validation for retribusi status +func validateRetribusiStatus(fl validator.FieldLevel) bool { + return models.IsValidStatus(fl.Field().String()) +} + +// RetribusiHandler handles retribusi services +type RetribusiHandler struct { + db database.Service +} + +// NewRetribusiHandler creates a new RetribusiHandler +func NewRetribusiHandler() *RetribusiHandler { + return &RetribusiHandler{ + db: db, + } +} + +// GetRetribusi godoc +// @Summary Get retribusi with pagination and optional aggregation +// @Description Returns a paginated list of retribusis with optional summary statistics +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param limit query int false "Limit (max 100)" default(10) +// @Param offset query int false "Offset" default(0) +// @Param include_summary query bool false "Include aggregation summary" default(false) +// @Param status query string false "Filter by status" +// @Param jenis query string false "Filter by jenis" +// @Param dinas query string false "Filter by dinas" +// @Param search query string false "Search in multiple fields" +// @Success 200 {object} 
retribusi.RetribusiGetResponse "Success response"
// @Failure 400 {object} models.ErrorResponse "Bad request"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusis [get]
func (h *RetribusiHandler) GetRetribusi(c *gin.Context) {
	// Parse pagination parameters
	limit, offset, err := h.parsePaginationParams(c)
	if err != nil {
		h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest)
		return
	}

	// Parse filter parameters
	filter := h.parseFilterParams(c)
	includeAggregation := c.Query("include_summary") == "true"

	// Get database connection
	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	// Create context with timeout
	ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
	defer cancel()

	// Execute count, data fetch, and (optionally) aggregation concurrently.
	// errChan is buffered to 3 so no goroutine blocks on send; mu also guards
	// the result variables written by the goroutines.
	var (
		retribusis    []retribusi.Retribusi
		total         int
		aggregateData *models.AggregateData
		wg            sync.WaitGroup
		errChan       = make(chan error, 3)
		mu            sync.Mutex
	)

	// Fetch total count
	wg.Add(1)
	go func() {
		defer wg.Done()
		if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil {
			mu.Lock()
			errChan <- fmt.Errorf("failed to get total count: %w", err)
			mu.Unlock()
		}
	}()

	// Fetch main data
	wg.Add(1)
	go func() {
		defer wg.Done()
		result, err := h.fetchRetribusis(ctx, dbConn, filter, limit, offset)
		mu.Lock()
		if err != nil {
			errChan <- fmt.Errorf("failed to fetch data: %w", err)
		} else {
			retribusis = result
		}
		mu.Unlock()
	}()

	// Fetch aggregation data if requested
	if includeAggregation {
		wg.Add(1)
		go func() {
			defer wg.Done()
			result, err := h.getAggregateData(ctx, dbConn, filter)
			mu.Lock()
			if err != nil {
				errChan <- fmt.Errorf("failed to get aggregate data: %w", err)
			} else {
				aggregateData = result
			}
			mu.Unlock()
		}()
	}

	// Wait for 
all goroutines
	wg.Wait()
	close(errChan)

	// Check for errors; the first error aborts the request with a 500.
	for err := range errChan {
		if err != nil {
			h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError)
			return
		}
	}

	// Build response
	meta := h.calculateMeta(limit, offset, total)
	response := retribusi.RetribusiGetResponse{
		Message: "Data retribusi berhasil diambil",
		Data:    retribusis,
		Meta:    meta,
	}

	// Attach the aggregation summary only when requested and computed.
	if includeAggregation && aggregateData != nil {
		response.Summary = aggregateData
	}

	c.JSON(http.StatusOK, response)
}

// GetRetribusiByID godoc
// @Summary Get Retribusi by ID
// @Description Returns a single retribusi by ID
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param id path string true "Retribusi ID (UUID)"
// @Success 200 {object} retribusi.RetribusiGetByIDResponse "Success response"
// @Failure 400 {object} models.ErrorResponse "Invalid ID format"
// @Failure 404 {object} models.ErrorResponse "Retribusi not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusi/{id} [get]
func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) {
	id := c.Param("id")

	// Validate UUID format before touching the database.
	if _, err := uuid.Parse(id); err != nil {
		h.respondError(c, "Invalid ID format", err, http.StatusBadRequest)
		return
	}

	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	dataretribusi, err := h.getRetribusiByID(ctx, dbConn, id)
	if err != nil {
		// sql.ErrNoRows maps to 404; anything else is a server error.
		if err == sql.ErrNoRows {
			h.respondError(c, "Retribusi not found", err, http.StatusNotFound)
		} else {
			h.logAndRespondError(c, "Failed to get retribusi", err, http.StatusInternalServerError)
		}
		return
	}

	response := retribusi.RetribusiGetByIDResponse{
		Message: "Retribusi details retrieved 
successfully", + Data: dataretribusi, + } + + c.JSON(http.StatusOK, response) +} + +// GetRetribusiDynamic godoc +// @Summary Get retribusi with dynamic filtering +// @Description Returns retribusis with advanced dynamic filtering like Directus +// @Tags Retribusi +// @Accept json +// @Produce json +// @Param fields query string false "Fields to select (e.g., fields=*.*)" +// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[Jenis][_eq]=value)" +// @Param sort query string false "Sort fields (e.g., sort=date_created,-Jenis)" +// @Param limit query int false "Limit" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} retribusi.RetribusiGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/retribusis/dynamic [get] +func (h *RetribusiHandler) GetRetribusiDynamic(c *gin.Context) { + // Parse query parameters + parser := utils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) + if err != nil { + h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) + return + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query with dynamic filtering + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) + response := retribusi.RetribusiGetResponse{ + Message: "Data retribusi berhasil 
diambil", + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +} + +// fetchRetribusisDynamic executes dynamic query +func (h *RetribusiHandler) fetchRetribusisDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]retribusi.Retribusi, int, error) { + // Setup query builder + countBuilder := utils.NewQueryBuilder("data_retribusi"). + SetColumnMapping(map[string]string{ + "jenis": "Jenis", + "pelayanan": "Pelayanan", + "dinas": "Dinas", + "kelompok_obyek": "Kelompok_obyek", + "Kode_tarif": "Kode_tarif", + "kode_tarif": "Kode_tarif", + "tarif": "Tarif", + "satuan": "Satuan", + "tarif_overtime": "Tarif_overtime", + "satuan_overtime": "Satuan_overtime", + "rekening_pokok": "Rekening_pokok", + "rekening_denda": "Rekening_denda", + "uraian_1": "Uraian_1", + "uraian_2": "Uraian_2", + "uraian_3": "Uraian_3", + }). + SetAllowedColumns([]string{ + "id", "status", "sort", "user_created", "date_created", + "user_updated", "date_updated", "Jenis", "Pelayanan", + "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", + "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", + "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }) + + mainBuilder := utils.NewQueryBuilder("data_retribusi"). + SetColumnMapping(map[string]string{ + "jenis": "Jenis", + "pelayanan": "Pelayanan", + "dinas": "Dinas", + "kelompok_obyek": "Kelompok_obyek", + "Kode_tarif": "Kode_tarif", + "kode_tarif": "Kode_tarif", + "tarif": "Tarif", + "satuan": "Satuan", + "tarif_overtime": "Tarif_overtime", + "satuan_overtime": "Satuan_overtime", + "rekening_pokok": "Rekening_pokok", + "rekening_denda": "Rekening_denda", + "uraian_1": "Uraian_1", + "uraian_2": "Uraian_2", + "uraian_3": "Uraian_3", + }). 
+ SetAllowedColumns([]string{ + "id", "status", "sort", "user_created", "date_created", + "user_updated", "date_updated", "Jenis", "Pelayanan", + "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", + "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", + "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }) + + // Add default filter to exclude deleted records + if len(query.Filters) > 0 { + query.Filters = append([]utils.FilterGroup{{ + Filters: []utils.DynamicFilter{{ + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }}, + LogicOp: "AND", + }}, query.Filters...) + } else { + query.Filters = []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{{ + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }}, + LogicOp: "AND", + }} + } + + // Execute queries sequentially to avoid race conditions + var total int + var retribusis []retribusi.Retribusi + + // 1. Get total count first + countQuery := query + countQuery.Limit = 0 + countQuery.Offset = 0 + + countSQL, countArgs, err := countBuilder.BuildCountQuery(countQuery) + if err != nil { + return nil, 0, fmt.Errorf("failed to build count query: %w", err) + } + + if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { + return nil, 0, fmt.Errorf("failed to get total count: %w", err) + } + + // 2. Get main data + mainSQL, mainArgs, err := mainBuilder.BuildQuery(query) + if err != nil { + return nil, 0, fmt.Errorf("failed to build main query: %w", err) + } + + rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) 
+ if err != nil { + return nil, 0, fmt.Errorf("failed to execute main query: %w", err) + } + defer rows.Close() + + for rows.Next() { + retribusi, err := h.scanRetribusi(rows) + if err != nil { + return nil, 0, fmt.Errorf("failed to scan retribusi: %w", err) + } + retribusis = append(retribusis, retribusi) + } + + if err := rows.Err(); err != nil { + return nil, 0, fmt.Errorf("rows iteration error: %w", err) + } + + return retribusis, total, nil +} + +// SearchRetribusiAdvanced provides advanced search capabilities +func (h *RetribusiHandler) SearchRetribusiAdvanced(c *gin.Context) { + // Parse complex search parameters + searchQuery := c.Query("q") + if searchQuery == "" { + // If no search query provided, return all records with default sorting + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{}, // Empty filters - fetchRetribusisDynamic will add default deleted filter + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query to get all records + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := retribusi.RetribusiGetResponse{ 
+ Message: "All records retrieved (no search query provided)", + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) + return + } + + // Build dynamic query for search + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{ + { + Column: "Jenis", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Pelayanan", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Dinas", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + { + Column: "Uraian_1", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + }, + LogicOp: "AND", + }}, + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute search + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := retribusi.RetribusiGetResponse{ + Message: fmt.Sprintf("Search results for '%s'", searchQuery), + Data: retribusis, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +} + +// CreateRetribusi godoc +// @Summary Create retribusi +// 
@Description Creates a new retribusi record
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param request body retribusi.RetribusiCreateRequest true "Retribusi creation request"
// @Success 201 {object} retribusi.RetribusiCreateResponse "Retribusi created successfully"
// @Failure 400 {object} models.ErrorResponse "Bad request or validation error"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusis [post]
func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) {
	var req retribusi.RetribusiCreateRequest

	if err := c.ShouldBindJSON(&req); err != nil {
		h.respondError(c, "Invalid request body", err, http.StatusBadRequest)
		return
	}

	// Validate request against struct tags (incl. the custom retribusi_status rule).
	if err := validate.Struct(&req); err != nil {
		h.respondError(c, "Validation failed", err, http.StatusBadRequest)
		return
	}

	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	// Validate duplicate and daily submission (business-rule check).
	if err := h.validateRetribusiSubmission(ctx, dbConn, &req); err != nil {
		h.respondError(c, "Validation failed", err, http.StatusBadRequest)
		return
	}

	dataretribusi, err := h.createRetribusi(ctx, dbConn, &req)
	if err != nil {
		h.logAndRespondError(c, "Failed to create retribusi", err, http.StatusInternalServerError)
		return
	}

	response := retribusi.RetribusiCreateResponse{
		Message: "Retribusi berhasil dibuat",
		Data:    dataretribusi,
	}

	c.JSON(http.StatusCreated, response)
}

// UpdateRetribusi godoc
// @Summary Update retribusi
// @Description Updates an existing retribusi record
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param id path string true "Retribusi ID (UUID)"
// @Param request body retribusi.RetribusiUpdateRequest true "Retribusi update request" 
// @Success 200 {object} retribusi.RetribusiUpdateResponse "Retribusi updated successfully"
// @Failure 400 {object} models.ErrorResponse "Bad request or validation error"
// @Failure 404 {object} models.ErrorResponse "Retribusi not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusi/{id} [put]
func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) {
	id := c.Param("id")

	// Validate UUID format before touching the database.
	if _, err := uuid.Parse(id); err != nil {
		h.respondError(c, "Invalid ID format", err, http.StatusBadRequest)
		return
	}

	var req retribusi.RetribusiUpdateRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		h.respondError(c, "Invalid request body", err, http.StatusBadRequest)
		return
	}

	// Set ID from path parameter (path wins over any ID in the body).
	req.ID = id

	// Validate request
	if err := validate.Struct(&req); err != nil {
		h.respondError(c, "Validation failed", err, http.StatusBadRequest)
		return
	}

	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	dataretribusi, err := h.updateRetribusi(ctx, dbConn, &req)
	if err != nil {
		// NOTE(review): this comparison only matches if updateRetribusi
		// returns sql.ErrNoRows unwrapped — verify it does not wrap the error.
		if err == sql.ErrNoRows {
			h.respondError(c, "Retribusi not found", err, http.StatusNotFound)
		} else {
			h.logAndRespondError(c, "Failed to update retribusi", err, http.StatusInternalServerError)
		}
		return
	}

	response := retribusi.RetribusiUpdateResponse{
		Message: "Retribusi berhasil diperbarui",
		Data:    dataretribusi,
	}

	c.JSON(http.StatusOK, response)
}

// DeleteRetribusi godoc
// @Summary Delete retribusi
// @Description Soft deletes a retribusi by setting status to 'deleted'
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param id path string true "Retribusi ID (UUID)"
// @Success 200 {object} 
retribusi.RetribusiDeleteResponse "Retribusi deleted successfully"
// @Failure 400 {object} models.ErrorResponse "Invalid ID format"
// @Failure 404 {object} models.ErrorResponse "Retribusi not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusi/{id} [delete]
func (h *RetribusiHandler) DeleteRetribusi(c *gin.Context) {
	id := c.Param("id")

	// Validate UUID format before touching the database.
	if _, err := uuid.Parse(id); err != nil {
		h.respondError(c, "Invalid ID format", err, http.StatusBadRequest)
		return
	}

	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	err = h.deleteRetribusi(ctx, dbConn, id)
	if err != nil {
		// deleteRetribusi returns sql.ErrNoRows when no row was affected.
		if err == sql.ErrNoRows {
			h.respondError(c, "Retribusi not found", err, http.StatusNotFound)
		} else {
			h.logAndRespondError(c, "Failed to delete retribusi", err, http.StatusInternalServerError)
		}
		return
	}

	response := retribusi.RetribusiDeleteResponse{
		Message: "Retribusi berhasil dihapus",
		ID:      id,
	}

	c.JSON(http.StatusOK, response)
}

// GetRetribusiStats godoc
// @Summary Get retribusi statistics
// @Description Returns comprehensive statistics about retribusi data
// @Tags Retribusi
// @Accept json
// @Produce json
// @Param status query string false "Filter statistics by status"
// @Success 200 {object} models.AggregateData "Statistics data"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/retribusis/stats [get]
func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) {
	dbConn, err := h.db.GetDB("postgres_satudata")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	ctx, cancel := context.WithTimeout(c.Request.Context(), 
15*time.Second)
	defer cancel()

	// Reuse the list filters so stats can be narrowed the same way.
	filter := h.parseFilterParams(c)
	aggregateData, err := h.getAggregateData(ctx, dbConn, filter)
	if err != nil {
		h.logAndRespondError(c, "Failed to get statistics", err, http.StatusInternalServerError)
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"message": "Statistik retribusi berhasil diambil",
		"data":    aggregateData,
	})
}

// Get retribusi by ID. Returns sql.ErrNoRows unwrapped when the row does not
// exist (or is soft-deleted) so callers can map it to 404.
func (h *RetribusiHandler) getRetribusiByID(ctx context.Context, dbConn *sql.DB, id string) (*retribusi.Retribusi, error) {
	query := `
	SELECT
		id, status, sort, user_created, date_created, user_updated, date_updated,
		"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
		"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
		"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"
	FROM data_retribusi
	WHERE id = $1 AND status != 'deleted'`

	row := dbConn.QueryRowContext(ctx, query, id)

	// NOTE(review): the local `retribusi` shadows the imported package for the
	// rest of this function — legal, but worth renaming.
	var retribusi retribusi.Retribusi
	err := row.Scan(
		&retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated,
		&retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated,
		&retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek,
		&retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime,
		&retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda,
		&retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3,
	)

	if err != nil {
		return nil, err
	}

	return &retribusi, nil
}

// Create retribusi: INSERTs a new row with a fresh UUID and returns the row as
// stored (via RETURNING).
func (h *RetribusiHandler) createRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) (*retribusi.Retribusi, error) {
	id := uuid.New().String()
	now := time.Now()

	query := `
	INSERT INTO data_retribusi (
		id, status, date_created, date_updated,
		"Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif",
		"Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime",
		"Rekening_pokok", 
"Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) + RETURNING + id, status, sort, user_created, date_created, user_updated, date_updated, + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"` + + row := dbConn.QueryRowContext(ctx, query, + id, req.Status, now, now, + req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, + req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, + req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, + ) + + var retribusi retribusi.Retribusi + err := row.Scan( + &retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated, + &retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated, + &retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek, + &retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime, + &retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda, + &retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3, + ) + + if err != nil { + return nil, fmt.Errorf("failed to create retribusi: %w", err) + } + + return &retribusi, nil +} + +// Update retribusi +func (h *RetribusiHandler) updateRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiUpdateRequest) (*retribusi.Retribusi, error) { + now := time.Now() + + query := ` + UPDATE data_retribusi SET + status = $2, date_updated = $3, + "Jenis" = $4, "Pelayanan" = $5, "Dinas" = $6, "Kelompok_obyek" = $7, "Kode_tarif" = $8, + "Tarif" = $9, "Satuan" = $10, "Tarif_overtime" = $11, "Satuan_overtime" = $12, + "Rekening_pokok" = $13, "Rekening_denda" = $14, "Uraian_1" = $15, "Uraian_2" = $16, "Uraian_3" = $17 + WHERE id = $1 AND status != 'deleted' + RETURNING + id, status, sort, 
user_created, date_created, user_updated, date_updated, + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"` + + row := dbConn.QueryRowContext(ctx, query, + req.ID, req.Status, now, + req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, + req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, + req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, + ) + + var retribusi retribusi.Retribusi + err := row.Scan( + &retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated, + &retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated, + &retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek, + &retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime, + &retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda, + &retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3, + ) + + if err != nil { + return nil, fmt.Errorf("failed to update retribusi: %w", err) + } + + return &retribusi, nil +} + +// Soft delete retribusi +func (h *RetribusiHandler) deleteRetribusi(ctx context.Context, dbConn *sql.DB, id string) error { + now := time.Now() + + query := `UPDATE data_retribusi SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'` + + result, err := dbConn.ExecContext(ctx, query, id, now) + if err != nil { + return fmt.Errorf("failed to delete retribusi: %w", err) + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("failed to get affected rows: %w", err) + } + + if rowsAffected == 0 { + return sql.ErrNoRows + } + + return nil +} + +// Enhanced error handling +func (h *RetribusiHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + logger.Error(message, map[string]interface{}{ + "error": 
err.Error(), + "status_code": statusCode, + }) + h.respondError(c, message, err, statusCode) +} + +func (h *RetribusiHandler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + + c.JSON(statusCode, models.ErrorResponse{ + Error: errorMessage, + Code: statusCode, + Message: err.Error(), + Timestamp: time.Now(), + }) +} + +// Parse pagination parameters dengan validation yang lebih ketat +func (h *RetribusiHandler) parsePaginationParams(c *gin.Context) (int, int, error) { + limit := 10 // Default limit + offset := 0 // Default offset + + if limitStr := c.Query("limit"); limitStr != "" { + parsedLimit, err := strconv.Atoi(limitStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) + } + if parsedLimit <= 0 { + return 0, 0, fmt.Errorf("limit must be greater than 0") + } + if parsedLimit > 100 { + return 0, 0, fmt.Errorf("limit cannot exceed 100") + } + limit = parsedLimit + } + + if offsetStr := c.Query("offset"); offsetStr != "" { + parsedOffset, err := strconv.Atoi(offsetStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) + } + if parsedOffset < 0 { + return 0, 0, fmt.Errorf("offset cannot be negative") + } + offset = parsedOffset + } + + logger.Debug("Pagination parameters", map[string]interface{}{ + "limit": limit, + "offset": offset, + }) + return limit, offset, nil +} + +// Build WHERE clause dengan filter parameters +func (h *RetribusiHandler) buildWhereClause(filter retribusi.RetribusiFilter) (string, []interface{}) { + conditions := []string{"status != 'deleted'"} + args := []interface{}{} + paramCount := 1 + + if filter.Status != nil { + conditions = append(conditions, fmt.Sprintf("status = $%d", paramCount)) + args = append(args, *filter.Status) + paramCount++ + } + + if filter.Jenis != nil { + conditions = append(conditions, fmt.Sprintf(`"Jenis" ILIKE 
$%d`, paramCount)) + args = append(args, "%"+*filter.Jenis+"%") + paramCount++ + } + + if filter.Dinas != nil { + conditions = append(conditions, fmt.Sprintf(`"Dinas" ILIKE $%d`, paramCount)) + args = append(args, "%"+*filter.Dinas+"%") + paramCount++ + } + + if filter.KelompokObyek != nil { + conditions = append(conditions, fmt.Sprintf(`"Kelompok_obyek" ILIKE $%d`, paramCount)) + args = append(args, "%"+*filter.KelompokObyek+"%") + paramCount++ + } + + if filter.Search != nil { + searchCondition := fmt.Sprintf(`( + "Jenis" ILIKE $%d OR + "Pelayanan" ILIKE $%d OR + "Dinas" ILIKE $%d OR + "Kode_tarif" ILIKE $%d OR + "Uraian_1" ILIKE $%d OR + "Uraian_2" ILIKE $%d OR + "Uraian_3" ILIKE $%d + )`, paramCount, paramCount, paramCount, paramCount, paramCount, paramCount, paramCount) + conditions = append(conditions, searchCondition) + searchTerm := "%" + *filter.Search + "%" + args = append(args, searchTerm) + paramCount++ + } + + if filter.DateFrom != nil { + conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) + args = append(args, *filter.DateFrom) + paramCount++ + } + + if filter.DateTo != nil { + conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) + args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) // End of day + paramCount++ + } + + return strings.Join(conditions, " AND "), args +} + +// Optimized scanning function yang menggunakan sql.Null* types langsung +func (h *RetribusiHandler) scanRetribusi(rows *sql.Rows) (retribusi.Retribusi, error) { + var retribusi retribusi.Retribusi + + return retribusi, rows.Scan( + &retribusi.ID, + &retribusi.Status, + &retribusi.Sort, + &retribusi.UserCreated, + &retribusi.DateCreated, + &retribusi.UserUpdated, + &retribusi.DateUpdated, + &retribusi.Jenis, + &retribusi.Pelayanan, + &retribusi.Dinas, + &retribusi.KelompokObyek, + &retribusi.KodeTarif, + &retribusi.Tarif, + &retribusi.Satuan, + &retribusi.TarifOvertime, + &retribusi.SatuanOvertime, + 
&retribusi.RekeningPokok, + &retribusi.RekeningDenda, + &retribusi.Uraian1, + &retribusi.Uraian2, + &retribusi.Uraian3, + ) +} + +// Parse filter parameters dari query string +func (h *RetribusiHandler) parseFilterParams(c *gin.Context) retribusi.RetribusiFilter { + filter := retribusi.RetribusiFilter{} + + if status := c.Query("status"); status != "" { + if models.IsValidStatus(status) { + filter.Status = &status + } + } + + if jenis := c.Query("jenis"); jenis != "" { + filter.Jenis = &jenis + } + + if dinas := c.Query("dinas"); dinas != "" { + filter.Dinas = &dinas + } + + if kelompokObyek := c.Query("kelompok_obyek"); kelompokObyek != "" { + filter.KelompokObyek = &kelompokObyek + } + + if search := c.Query("search"); search != "" { + filter.Search = &search + } + + // Parse date filters + if dateFromStr := c.Query("date_from"); dateFromStr != "" { + if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { + filter.DateFrom = &dateFrom + } + } + + if dateToStr := c.Query("date_to"); dateToStr != "" { + if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { + filter.DateTo = &dateTo + } + } + + return filter +} + +// Get comprehensive aggregate data dengan filter support +func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter) (*models.AggregateData, error) { + aggregate := &models.AggregateData{ + ByStatus: make(map[string]int), + ByDinas: make(map[string]int), + ByJenis: make(map[string]int), + } + + // Build where clause untuk filter + whereClause, args := h.buildWhereClause(filter) + + // Use concurrent execution untuk performance + var wg sync.WaitGroup + var mu sync.Mutex + errChan := make(chan error, 4) + + // 1. 
Count by status + wg.Add(1) + go func() { + defer wg.Done() + statusQuery := fmt.Sprintf(` + SELECT status, COUNT(*) + FROM data_retribusi + WHERE %s + GROUP BY status + ORDER BY status`, whereClause) + + rows, err := dbConn.QueryContext(ctx, statusQuery, args...) + if err != nil { + errChan <- fmt.Errorf("status query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var status string + var count int + if err := rows.Scan(&status, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("status scan failed: %w", err) + return + } + aggregate.ByStatus[status] = count + switch status { + case "active": + aggregate.TotalActive = count + case "draft": + aggregate.TotalDraft = count + case "inactive": + aggregate.TotalInactive = count + } + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("status iteration error: %w", err) + } + }() + + // 2. Count by Dinas + wg.Add(1) + go func() { + defer wg.Done() + dinasQuery := fmt.Sprintf(` + SELECT COALESCE("Dinas", 'Unknown') as dinas, COUNT(*) + FROM data_retribusi + WHERE %s AND "Dinas" IS NOT NULL AND TRIM("Dinas") != '' + GROUP BY "Dinas" + ORDER BY COUNT(*) DESC + LIMIT 10`, whereClause) + + rows, err := dbConn.QueryContext(ctx, dinasQuery, args...) + if err != nil { + errChan <- fmt.Errorf("dinas query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var dinas string + var count int + if err := rows.Scan(&dinas, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("dinas scan failed: %w", err) + return + } + aggregate.ByDinas[dinas] = count + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("dinas iteration error: %w", err) + } + }() + + // 3. 
Count by Jenis + wg.Add(1) + go func() { + defer wg.Done() + jenisQuery := fmt.Sprintf(` + SELECT COALESCE("Jenis", 'Unknown') as jenis, COUNT(*) + FROM data_retribusi + WHERE %s AND "Jenis" IS NOT NULL AND TRIM("Jenis") != '' + GROUP BY "Jenis" + ORDER BY COUNT(*) DESC + LIMIT 10`, whereClause) + + rows, err := dbConn.QueryContext(ctx, jenisQuery, args...) + if err != nil { + errChan <- fmt.Errorf("jenis query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var jenis string + var count int + if err := rows.Scan(&jenis, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("jenis scan failed: %w", err) + return + } + aggregate.ByJenis[jenis] = count + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("jenis iteration error: %w", err) + } + }() + + // 4. Get last updated time dan today statistics + wg.Add(1) + go func() { + defer wg.Done() + + // Last updated + lastUpdatedQuery := fmt.Sprintf(` + SELECT MAX(date_updated) + FROM data_retribusi + WHERE %s AND date_updated IS NOT NULL`, whereClause) + + var lastUpdated sql.NullTime + if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { + errChan <- fmt.Errorf("last updated query failed: %w", err) + return + } + + // Today statistics + today := time.Now().Format("2006-01-02") + todayStatsQuery := fmt.Sprintf(` + SELECT + SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, + SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today + FROM data_retribusi + WHERE %s`, len(args)+1, len(args)+1, len(args)+1, whereClause) + + todayArgs := append(args, today) + var createdToday, updatedToday int + if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { + errChan <- fmt.Errorf("today stats query failed: %w", err) + return + } + + mu.Lock() + if lastUpdated.Valid { + 
aggregate.LastUpdated = &lastUpdated.Time + } + aggregate.CreatedToday = createdToday + aggregate.UpdatedToday = updatedToday + mu.Unlock() + }() + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, err + } + } + + return aggregate, nil +} + +// Get total count dengan filter support +func (h *RetribusiHandler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, total *int) error { + whereClause, args := h.buildWhereClause(filter) + countQuery := fmt.Sprintf(`SELECT COUNT(*) FROM data_retribusi WHERE %s`, whereClause) + + if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil { + return fmt.Errorf("total count query failed: %w", err) + } + + return nil +} + +// Enhanced fetchRetribusis dengan filter support +func (h *RetribusiHandler) fetchRetribusis(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, limit, offset int) ([]retribusi.Retribusi, error) { + whereClause, args := h.buildWhereClause(filter) + + // Build the main query with pagination + query := fmt.Sprintf(` + SELECT + id, status, sort, user_created, date_created, user_updated, date_updated, + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" + FROM data_retribusi + WHERE %s + ORDER BY date_created DESC NULLS LAST + LIMIT $%d OFFSET $%d`, + whereClause, len(args)+1, len(args)+2) + + // Add pagination parameters + args = append(args, limit, offset) + + rows, err := dbConn.QueryContext(ctx, query, args...) 
+ if err != nil { + return nil, fmt.Errorf("fetch retribusis query failed: %w", err) + } + defer rows.Close() + + // Pre-allocate slice dengan kapasitas yang tepat + retribusis := make([]retribusi.Retribusi, 0, limit) + + for rows.Next() { + retribusi, err := h.scanRetribusi(rows) + if err != nil { + return nil, fmt.Errorf("scan retribusi failed: %w", err) + } + retribusis = append(retribusis, retribusi) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("rows iteration error: %w", err) + } + + logger.Info("Successfully fetched retribusis", map[string]interface{}{ + "count": len(retribusis), + "limit": limit, + "offset": offset, + }) + return retribusis, nil +} + +// Calculate pagination metadata +func (h *RetribusiHandler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages := 0 + currentPage := 1 + + if limit > 0 { + totalPages = (total + limit - 1) / limit // Ceiling division + currentPage = (offset / limit) + 1 + } + + return models.MetaResponse{ + Limit: limit, + Offset: offset, + Total: total, + TotalPages: totalPages, + CurrentPage: currentPage, + HasNext: offset+limit < total, + HasPrev: offset > 0, + } +} + +// validateRetribusiSubmission performs validation for duplicate entries and daily submission limits +func (h *RetribusiHandler) validateRetribusiSubmission(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { + // Import the validation utility + validator := validation.NewDuplicateValidator(dbConn) + + // Use default retribusi configuration + config := validation.DefaultRetribusiConfig() + + // Validate duplicate entries with active status for today + err := validator.ValidateDuplicate(ctx, config, "dummy_id") + if err != nil { + return fmt.Errorf("validation failed: %w", err) + } + + // Validate once per day submission + err = validator.ValidateOncePerDay(ctx, "data_retribusi", "id", "date_created", "daily_limit") + if err != nil { + return fmt.Errorf("daily submission limit 
exceeded: %w", err) + } + + return nil +} + +// Example usage of the validation utility with custom configuration +func (h *RetribusiHandler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { + // Create validator instance + validator := validation.NewDuplicateValidator(dbConn) + + // Use custom configuration + config := validation.ValidationConfig{ + TableName: "data_retribusi", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + AdditionalFields: map[string]interface{}{ + "jenis": req.Jenis, + "dinas": req.Dinas, + }, + } + + // Validate with custom fields + fields := map[string]interface{}{ + "jenis": *req.Jenis, + "dinas": *req.Dinas, + } + + err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) + if err != nil { + return fmt.Errorf("custom validation failed: %w", err) + } + + return nil +} + +// GetLastSubmissionTime example +func (h *RetribusiHandler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) { + validator := validation.NewDuplicateValidator(dbConn) + return validator.GetLastSubmissionTime(ctx, "data_retribusi", "id", "date_created", identifier) +} diff --git a/internal/helpers/bpjs/lz-string.go b/internal/helpers/bpjs/lz-string.go new file mode 100644 index 0000000..c05dd71 --- /dev/null +++ b/internal/helpers/bpjs/lz-string.go @@ -0,0 +1,83 @@ +package helper + +import ( + "log" + + lzstring "github.com/daku10/go-lz-string" +) + +// StringDecrypt - langsung coba decompress tanpa decrypt ulang +func StringDecrypt(key string, encryptedString string) (string, error) { + log.Printf("StringDecrypt: Attempting decompression, data length: %d", len(encryptedString)) + + // Method 1: Try direct LZ-string decompression (data sudah didecrypt di response.go) + if result, err := lzstring.DecompressFromEncodedURIComponent(encryptedString); err == nil && 
len(result) > 0 { + log.Printf("StringDecrypt: Direct decompression successful") + return result, nil + } + + // Method 2: Try other LZ-string methods + if result, err := lzstring.DecompressFromBase64(encryptedString); err == nil && len(result) > 0 { + log.Printf("StringDecrypt: Base64 decompression successful") + return result, nil + } + + // Method 3: If all fail, return the original string + log.Printf("StringDecrypt: All decompression failed, returning original data") + return encryptedString, nil +} + +func RemovePKCS7Padding(data []byte) []byte { + if len(data) == 0 { + return data + } + + paddingLength := int(data[len(data)-1]) + + // Validasi padding length + if paddingLength > len(data) || paddingLength == 0 || paddingLength > 16 { + log.Printf("RemovePKCS7Padding: Invalid padding length: %d, data length: %d", paddingLength, len(data)) + // Coba kembalikan data tanpa byte terakhir jika padding tampak salah + if len(data) > 1 { + return data[:len(data)-1] + } + return data + } + + // Verify all padding bytes are correct + paddingStart := len(data) - paddingLength + for i := paddingStart; i < len(data); i++ { + if data[i] != byte(paddingLength) { + log.Printf("RemovePKCS7Padding: Invalid padding byte at position %d, expected %d, got %d", i, paddingLength, data[i]) + // Jika padding tidak valid, coba cari padding yang benar + return findValidPadding(data) + } + } + + result := data[:paddingStart] + log.Printf("RemovePKCS7Padding: Successfully removed %d padding bytes", paddingLength) + return result +} + +// Fungsi baru untuk mencari padding yang valid +func findValidPadding(data []byte) []byte { + dataLen := len(data) + + // Coba berbagai kemungkinan padding length (1-16) + for padLen := 1; padLen <= 16 && padLen <= dataLen; padLen++ { + valid := true + for i := dataLen - padLen; i < dataLen; i++ { + if data[i] != byte(padLen) { + valid = false + break + } + } + if valid { + log.Printf("RemovePKCS7Padding: Found valid padding of length %d", padLen) + return 
data[:dataLen-padLen]
+		}
+	}
+
+	log.Printf("RemovePKCS7Padding: No valid padding found, returning original data")
+	return data
+}
diff --git a/internal/helpers/bpjs/pkcs7.go b/internal/helpers/bpjs/pkcs7.go
new file mode 100644
index 0000000..119de76
--- /dev/null
+++ b/internal/helpers/bpjs/pkcs7.go
@@ -0,0 +1,42 @@
+package helper
+
+import "errors"
+
+// Pad appends PKCS#7 padding so len(result) is a multiple of size.
+// A full block of padding is appended when buf is already aligned.
+func Pad(buf []byte, size int) ([]byte, error) {
+	bufLen := len(buf)
+	padLen := size - bufLen%size
+	padded := make([]byte, bufLen+padLen)
+	copy(padded, buf)
+	for i := 0; i < padLen; i++ {
+		padded[bufLen+i] = byte(padLen)
+	}
+	return padded, nil
+}
+
+// Unpad strips PKCS#7 padding after validating it. The previous version
+// trusted the final byte unchecked: a corrupt last byte greater than
+// len(padded) made bufLen negative and make([]byte, bufLen) panic.
+func Unpad(padded []byte, size int) ([]byte, error) {
+	if len(padded) == 0 || len(padded)%size != 0 {
+		return nil, errors.New("pkcs7: Padded value wasn't in correct size.")
+	}
+
+	// Last byte encodes the padding length; it must be in [1, size].
+	padLen := int(padded[len(padded)-1])
+	if padLen == 0 || padLen > size || padLen > len(padded) {
+		return nil, errors.New("pkcs7: invalid padding length")
+	}
+	// Every padding byte must equal padLen (RFC 5652 section 6.3).
+	for _, b := range padded[len(padded)-padLen:] {
+		if b != byte(padLen) {
+			return nil, errors.New("pkcs7: invalid padding byte")
+		}
+	}
+
+	bufLen := len(padded) - padLen
+	buf := make([]byte, bufLen)
+	copy(buf, padded[:bufLen])
+	return buf, nil
+}
\ No newline at end of file
diff --git a/internal/middleware/auth_middleware.go b/internal/middleware/auth_middleware.go
new file mode 100644
index 0000000..1d3969c
--- /dev/null
+++ b/internal/middleware/auth_middleware.go
@@ -0,0 +1,59 @@
+package middleware
+
+import (
+	"fmt"
+	"net/http"
+
+	"api-service/internal/config"
+
+	"github.com/gin-gonic/gin"
+)
+
+// ConfigurableAuthMiddleware provides flexible authentication based on configuration
+func ConfigurableAuthMiddleware(cfg *config.Config) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		// Skip authentication for development/testing if explicitly disabled
+		if !cfg.Keycloak.Enabled {
+			fmt.Println("Authentication is disabled - allowing all requests")
+			c.Next()
+			return
+		}
+
+		// Use Keycloak authentication when enabled
+		AuthMiddleware()(c)
+	}
+}
+
+// StrictAuthMiddleware enforces authentication regardless of Keycloak.Enabled setting
+func StrictAuthMiddleware() gin.HandlerFunc {
+	return func(c *gin.Context) {
+		if appConfig == nil {
+			fmt.Println("AuthMiddleware: Config not initialized")
+			
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) + return + } + + // Always enforce authentication + AuthMiddleware()(c) + } +} + +// OptionalKeycloakAuthMiddleware allows requests but adds authentication info if available +func OptionalKeycloakAuthMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + if appConfig == nil || !appConfig.Keycloak.Enabled { + c.Next() + return + } + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + // No token provided, but continue + c.Next() + return + } + + // Try to validate token, but don't fail if invalid + AuthMiddleware()(c) + } +} diff --git a/internal/middleware/error_handler.go b/internal/middleware/error_handler.go new file mode 100644 index 0000000..7f6ab82 --- /dev/null +++ b/internal/middleware/error_handler.go @@ -0,0 +1,54 @@ +package middleware + +import ( + models "api-service/internal/models" + "net/http" + + "github.com/gin-gonic/gin" +) + +// ErrorHandler handles errors globally +func ErrorHandler() gin.HandlerFunc { + return func(c *gin.Context) { + c.Next() + + if len(c.Errors) > 0 { + err := c.Errors.Last() + status := http.StatusInternalServerError + + // Determine status code based on error type + switch err.Type { + case gin.ErrorTypeBind: + status = http.StatusBadRequest + case gin.ErrorTypeRender: + status = http.StatusUnprocessableEntity + case gin.ErrorTypePrivate: + status = http.StatusInternalServerError + } + + response := models.ErrorResponse{ + Error: "internal_error", + Message: err.Error(), + Code: status, + } + + c.JSON(status, response) + } + } +} + +// CORS middleware configuration +func CORSConfig() gin.HandlerFunc { + return gin.HandlerFunc(func(c *gin.Context) { + c.Header("Access-Control-Allow-Origin", "*") + c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS, PATCH") + c.Header("Access-Control-Allow-Headers", "Origin, Content-Type, Content-Length, Accept-Encoding, 
X-CSRF-Token, Authorization") + + if c.Request.Method == "OPTIONS" { + c.AbortWithStatus(204) + return + } + + c.Next() + }) +} diff --git a/internal/middleware/jwt_middleware.go b/internal/middleware/jwt_middleware.go new file mode 100644 index 0000000..708ef7f --- /dev/null +++ b/internal/middleware/jwt_middleware.go @@ -0,0 +1,77 @@ +package middleware + +import ( + services "api-service/internal/services/auth" + "net/http" + "strings" + + "github.com/gin-gonic/gin" +) + +// JWTAuthMiddleware validates JWT tokens generated by our auth service +func JWTAuthMiddleware(authService *services.AuthService) gin.HandlerFunc { + return func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + tokenString := parts[1] + + // Validate token + claims, err := authService.ValidateToken(tokenString) + if err != nil { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + // Set user info in context + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + + c.Next() + } +} + +// OptionalAuthMiddleware allows both authenticated and unauthenticated requests +func OptionalAuthMiddleware(authService *services.AuthService) gin.HandlerFunc { + return func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + // No token provided, but continue + c.Next() + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.Next() + return + } + + tokenString := parts[1] + claims, err := 
authService.ValidateToken(tokenString) + if err != nil { + // Invalid token, but continue (don't abort) + c.Next() + return + } + + // Set user info in context + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + + c.Next() + } +} diff --git a/internal/middleware/keycloak_middleware.go b/internal/middleware/keycloak_middleware.go new file mode 100644 index 0000000..a336154 --- /dev/null +++ b/internal/middleware/keycloak_middleware.go @@ -0,0 +1,254 @@ +package middleware + +/** Keycloak Auth Middleware **/ +import ( + "crypto/rsa" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "math/big" + "net/http" + "strings" + "sync" + "time" + + "api-service/internal/config" + + "github.com/gin-gonic/gin" + "github.com/golang-jwt/jwt/v5" + "golang.org/x/sync/singleflight" +) + +var ( + ErrInvalidToken = errors.New("invalid token") +) + +// JwksCache caches JWKS keys with expiration +type JwksCache struct { + mu sync.RWMutex + keys map[string]*rsa.PublicKey + expiresAt time.Time + sfGroup singleflight.Group + config *config.Config +} + +func NewJwksCache(cfg *config.Config) *JwksCache { + return &JwksCache{ + keys: make(map[string]*rsa.PublicKey), + config: cfg, + } +} + +func (c *JwksCache) GetKey(kid string) (*rsa.PublicKey, error) { + c.mu.RLock() + if key, ok := c.keys[kid]; ok && time.Now().Before(c.expiresAt) { + c.mu.RUnlock() + return key, nil + } + c.mu.RUnlock() + + // Fetch keys with singleflight to avoid concurrent fetches + v, err, _ := c.sfGroup.Do("fetch_jwks", func() (interface{}, error) { + return c.fetchKeys() + }) + if err != nil { + return nil, err + } + + keys := v.(map[string]*rsa.PublicKey) + + c.mu.Lock() + c.keys = keys + c.expiresAt = time.Now().Add(1 * time.Hour) // cache for 1 hour + c.mu.Unlock() + + key, ok := keys[kid] + if !ok { + return nil, fmt.Errorf("key with kid %s not found", kid) + } + return key, nil +} + +func (c *JwksCache) fetchKeys() 
(map[string]*rsa.PublicKey, error) { + if !c.config.Keycloak.Enabled { + return nil, fmt.Errorf("keycloak authentication is disabled") + } + + jwksURL := c.config.Keycloak.JwksURL + if jwksURL == "" { + // Construct JWKS URL from issuer if not explicitly provided + jwksURL = c.config.Keycloak.Issuer + "/protocol/openid-connect/certs" + } + + resp, err := http.Get(jwksURL) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + var jwksData struct { + Keys []struct { + Kid string `json:"kid"` + Kty string `json:"kty"` + N string `json:"n"` + E string `json:"e"` + } `json:"keys"` + } + + if err := json.NewDecoder(resp.Body).Decode(&jwksData); err != nil { + return nil, err + } + + keys := make(map[string]*rsa.PublicKey) + for _, key := range jwksData.Keys { + if key.Kty != "RSA" { + continue + } + pubKey, err := parseRSAPublicKey(key.N, key.E) + if err != nil { + continue + } + keys[key.Kid] = pubKey + } + return keys, nil +} + +// parseRSAPublicKey parses RSA public key components from base64url strings +func parseRSAPublicKey(nStr, eStr string) (*rsa.PublicKey, error) { + nBytes, err := base64UrlDecode(nStr) + if err != nil { + return nil, err + } + eBytes, err := base64UrlDecode(eStr) + if err != nil { + return nil, err + } + + var eInt int + for _, b := range eBytes { + eInt = eInt<<8 + int(b) + } + + pubKey := &rsa.PublicKey{ + N: new(big.Int).SetBytes(nBytes), + E: eInt, + } + return pubKey, nil +} + +func base64UrlDecode(s string) ([]byte, error) { + // Add padding if missing + if m := len(s) % 4; m != 0 { + s += strings.Repeat("=", 4-m) + } + return base64.URLEncoding.DecodeString(s) +} + +// Global config instance +var appConfig *config.Config +var jwksCacheInstance *JwksCache + +// InitializeAuth initializes the auth middleware with config +func InitializeAuth(cfg *config.Config) { + appConfig = cfg + jwksCacheInstance = NewJwksCache(cfg) +} + +// AuthMiddleware validates Bearer token as Keycloak JWT token +func AuthMiddleware() 
gin.HandlerFunc { + return func(c *gin.Context) { + if appConfig == nil { + fmt.Println("AuthMiddleware: Config not initialized") + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) + return + } + + if !appConfig.Keycloak.Enabled { + // Skip authentication if Keycloak is disabled but log for debugging + fmt.Println("AuthMiddleware: Keycloak authentication is disabled - allowing all requests") + c.Next() + return + } + + fmt.Println("AuthMiddleware: Checking Authorization header") // Debug log + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + fmt.Println("AuthMiddleware: Authorization header missing") // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + fmt.Println("AuthMiddleware: Invalid Authorization header format") // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + tokenString := parts[1] + + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { + // Verify signing method + if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { + fmt.Printf("AuthMiddleware: Unexpected signing method: %v\n", token.Header["alg"]) // Debug log + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + + kid, ok := token.Header["kid"].(string) + if !ok { + fmt.Println("AuthMiddleware: kid header not found") // Debug log + return nil, errors.New("kid header not found") + } + + return jwksCacheInstance.GetKey(kid) + }, jwt.WithIssuer(appConfig.Keycloak.Issuer), jwt.WithAudience(appConfig.Keycloak.Audience)) + + if err != nil || !token.Valid { + fmt.Printf("AuthMiddleware: Invalid or expired token: %v\n", err) // Debug log + c.AbortWithStatusJSON(http.StatusUnauthorized, 
gin.H{"error": "Invalid or expired token"}) + return + } + + fmt.Println("AuthMiddleware: Token valid, proceeding") // Debug log + // Token is valid, proceed + c.Next() + } +} + +/** JWT Bearer authentication middleware */ +// import ( +// "net/http" +// "strings" + +// "github.com/gin-gonic/gin" +// ) + +// AuthMiddleware validates Bearer token in Authorization header +func AuthJWTMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) + return + } + + token := parts[1] + // For now, use a static token for validation. Replace with your logic. + const validToken = "your-static-token" + + if token != validToken { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"}) + return + } + + c.Next() + } +} diff --git a/internal/models/auth/auth.go b/internal/models/auth/auth.go new file mode 100644 index 0000000..872b45a --- /dev/null +++ b/internal/models/auth/auth.go @@ -0,0 +1,31 @@ +package models + +// LoginRequest represents the login request payload +type LoginRequest struct { + Username string `json:"username" binding:"required"` + Password string `json:"password" binding:"required"` +} + +// TokenResponse represents the token response +type TokenResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresIn int64 `json:"expires_in"` +} + +// JWTClaims represents the JWT claims +type JWTClaims struct { + UserID string `json:"user_id"` + Username string `json:"username"` + Email string `json:"email"` + Role string `json:"role"` +} + +// User represents a user for authentication 
+type User struct { + ID string `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + Password string `json:"-"` + Role string `json:"role"` +} diff --git a/internal/models/models.go b/internal/models/models.go new file mode 100644 index 0000000..2643ef8 --- /dev/null +++ b/internal/models/models.go @@ -0,0 +1,221 @@ +package models + +import ( + "database/sql" + "database/sql/driver" + "net/http" + "strconv" + "time" +) + +// NullableInt32 - your existing implementation +type NullableInt32 struct { + Int32 int32 `json:"int32,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableInt32 +func (n *NullableInt32) Scan(value interface{}) error { + var ni sql.NullInt32 + if err := ni.Scan(value); err != nil { + return err + } + n.Int32 = ni.Int32 + n.Valid = ni.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableInt32 +func (n NullableInt32) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Int32, nil +} + +// NullableString provides consistent nullable string handling +type NullableString struct { + String string `json:"string,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableString +func (n *NullableString) Scan(value interface{}) error { + var ns sql.NullString + if err := ns.Scan(value); err != nil { + return err + } + n.String = ns.String + n.Valid = ns.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableString +func (n NullableString) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.String, nil +} + +// NullableTime provides consistent nullable time handling +type NullableTime struct { + Time time.Time `json:"time,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableTime +func (n *NullableTime) Scan(value interface{}) error { + var nt sql.NullTime + if err := 
nt.Scan(value); err != nil { + return err + } + n.Time = nt.Time + n.Valid = nt.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableTime +func (n NullableTime) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Time, nil +} + +// Metadata untuk pagination - dioptimalkan +type MetaResponse struct { + Limit int `json:"limit"` + Offset int `json:"offset"` + Total int `json:"total"` + TotalPages int `json:"total_pages"` + CurrentPage int `json:"current_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_prev"` +} + +// Aggregate data untuk summary +type AggregateData struct { + TotalActive int `json:"total_active"` + TotalDraft int `json:"total_draft"` + TotalInactive int `json:"total_inactive"` + ByStatus map[string]int `json:"by_status"` + ByDinas map[string]int `json:"by_dinas,omitempty"` + ByJenis map[string]int `json:"by_jenis,omitempty"` + LastUpdated *time.Time `json:"last_updated,omitempty"` + CreatedToday int `json:"created_today"` + UpdatedToday int `json:"updated_today"` +} + +// Error response yang konsisten +type ErrorResponse struct { + Error string `json:"error"` + Code int `json:"code"` + Message string `json:"message"` + Timestamp time.Time `json:"timestamp"` +} + +// BaseRequest contains common fields for all BPJS requests +type BaseRequest struct { + RequestID string `json:"request_id,omitempty"` + Timestamp time.Time `json:"timestamp,omitempty"` +} + +// BaseResponse contains common response fields +type BaseResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + RequestID string `json:"request_id,omitempty"` + Timestamp string `json:"timestamp,omitempty"` +} + +// ErrorResponse represents error response structure +type ErrorResponseBpjs struct { + Status string `json:"status"` + Message string `json:"message"` + RequestID string `json:"request_id,omitempty"` + Errors map[string]interface{} `json:"errors,omitempty"` + Code string 
`json:"code,omitempty"` +} + +// PaginationRequest contains pagination parameters +type PaginationRequest struct { + Page int `json:"page" validate:"min=1"` + Limit int `json:"limit" validate:"min=1,max=100"` + SortBy string `json:"sort_by,omitempty"` + SortDir string `json:"sort_dir,omitempty" validate:"omitempty,oneof=asc desc"` +} + +// PaginationResponse contains pagination metadata +type PaginationResponse struct { + CurrentPage int `json:"current_page"` + TotalPages int `json:"total_pages"` + TotalItems int64 `json:"total_items"` + ItemsPerPage int `json:"items_per_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_previous"` +} + +// MetaInfo contains additional metadata +type MetaInfo struct { + Version string `json:"version"` + Environment string `json:"environment"` + ServerTime string `json:"server_time"` +} + +func GetStatusCodeFromMeta(metaCode interface{}) int { + statusCode := http.StatusOK + + if metaCode != nil { + switch v := metaCode.(type) { + case string: + if code, err := strconv.Atoi(v); err == nil { + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + } else { + statusCode = http.StatusInternalServerError + } + case int: + if v >= 100 && v <= 599 { + statusCode = v + } else { + statusCode = http.StatusInternalServerError + } + case float64: + code := int(v) + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + default: + statusCode = http.StatusInternalServerError + } + } + + return statusCode +} + +// Validation constants +const ( + StatusDraft = "draft" + StatusActive = "active" + StatusInactive = "inactive" + StatusDeleted = "deleted" +) + +// ValidStatuses untuk validasi +var ValidStatuses = []string{StatusDraft, StatusActive, StatusInactive} + +// IsValidStatus helper function +func IsValidStatus(status string) bool { + for _, validStatus := range ValidStatuses { + if status == validStatus { + 
return true + } + } + return false +} diff --git a/internal/models/retribusi/retribusi.go b/internal/models/retribusi/retribusi.go new file mode 100644 index 0000000..7907527 --- /dev/null +++ b/internal/models/retribusi/retribusi.go @@ -0,0 +1,228 @@ +package retribusi + +import ( + "api-service/internal/models" + "encoding/json" + "time" +) + +// Retribusi represents the data structure for the retribusi table +// with proper null handling and optimized JSON marshaling +type Retribusi struct { + ID string `json:"id" db:"id"` + Status string `json:"status" db:"status"` + Sort models.NullableInt32 `json:"sort,omitempty" db:"sort"` + UserCreated models.NullableString `json:"user_created,omitempty" db:"user_created"` + DateCreated models.NullableTime `json:"date_created,omitempty" db:"date_created"` + UserUpdated models.NullableString `json:"user_updated,omitempty" db:"user_updated"` + DateUpdated models.NullableTime `json:"date_updated,omitempty" db:"date_updated"` + Jenis models.NullableString `json:"jenis,omitempty" db:"Jenis"` + Pelayanan models.NullableString `json:"pelayanan,omitempty" db:"Pelayanan"` + Dinas models.NullableString `json:"dinas,omitempty" db:"Dinas"` + KelompokObyek models.NullableString `json:"kelompok_obyek,omitempty" db:"Kelompok_obyek"` + KodeTarif models.NullableString `json:"kode_tarif,omitempty" db:"Kode_tarif"` + Tarif models.NullableString `json:"tarif,omitempty" db:"Tarif"` + Satuan models.NullableString `json:"satuan,omitempty" db:"Satuan"` + TarifOvertime models.NullableString `json:"tarif_overtime,omitempty" db:"Tarif_overtime"` + SatuanOvertime models.NullableString `json:"satuan_overtime,omitempty" db:"Satuan_overtime"` + RekeningPokok models.NullableString `json:"rekening_pokok,omitempty" db:"Rekening_pokok"` + RekeningDenda models.NullableString `json:"rekening_denda,omitempty" db:"Rekening_denda"` + Uraian1 models.NullableString `json:"uraian_1,omitempty" db:"Uraian_1"` + Uraian2 models.NullableString `json:"uraian_2,omitempty" 
db:"Uraian_2"` + Uraian3 models.NullableString `json:"uraian_3,omitempty" db:"Uraian_3"` +} + +// Custom JSON marshaling untuk Retribusi agar NULL values tidak muncul di response +func (r Retribusi) MarshalJSON() ([]byte, error) { + type Alias Retribusi + aux := &struct { + Sort *int `json:"sort,omitempty"` + UserCreated *string `json:"user_created,omitempty"` + DateCreated *time.Time `json:"date_created,omitempty"` + UserUpdated *string `json:"user_updated,omitempty"` + DateUpdated *time.Time `json:"date_updated,omitempty"` + Jenis *string `json:"jenis,omitempty"` + Pelayanan *string `json:"pelayanan,omitempty"` + Dinas *string `json:"dinas,omitempty"` + KelompokObyek *string `json:"kelompok_obyek,omitempty"` + KodeTarif *string `json:"kode_tarif,omitempty"` + Tarif *string `json:"tarif,omitempty"` + Satuan *string `json:"satuan,omitempty"` + TarifOvertime *string `json:"tarif_overtime,omitempty"` + SatuanOvertime *string `json:"satuan_overtime,omitempty"` + RekeningPokok *string `json:"rekening_pokok,omitempty"` + RekeningDenda *string `json:"rekening_denda,omitempty"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + *Alias + }{ + Alias: (*Alias)(&r), + } + + // Convert NullableInt32 to pointer + if r.Sort.Valid { + sort := int(r.Sort.Int32) + aux.Sort = &sort + } + if r.UserCreated.Valid { + aux.UserCreated = &r.UserCreated.String + } + if r.DateCreated.Valid { + aux.DateCreated = &r.DateCreated.Time + } + if r.UserUpdated.Valid { + aux.UserUpdated = &r.UserUpdated.String + } + if r.DateUpdated.Valid { + aux.DateUpdated = &r.DateUpdated.Time + } + if r.Jenis.Valid { + aux.Jenis = &r.Jenis.String + } + if r.Pelayanan.Valid { + aux.Pelayanan = &r.Pelayanan.String + } + if r.Dinas.Valid { + aux.Dinas = &r.Dinas.String + } + if r.KelompokObyek.Valid { + aux.KelompokObyek = &r.KelompokObyek.String + } + if r.KodeTarif.Valid { + aux.KodeTarif = &r.KodeTarif.String + } + if 
r.Tarif.Valid { + aux.Tarif = &r.Tarif.String + } + if r.Satuan.Valid { + aux.Satuan = &r.Satuan.String + } + if r.TarifOvertime.Valid { + aux.TarifOvertime = &r.TarifOvertime.String + } + if r.SatuanOvertime.Valid { + aux.SatuanOvertime = &r.SatuanOvertime.String + } + if r.RekeningPokok.Valid { + aux.RekeningPokok = &r.RekeningPokok.String + } + if r.RekeningDenda.Valid { + aux.RekeningDenda = &r.RekeningDenda.String + } + if r.Uraian1.Valid { + aux.Uraian1 = &r.Uraian1.String + } + if r.Uraian2.Valid { + aux.Uraian2 = &r.Uraian2.String + } + if r.Uraian3.Valid { + aux.Uraian3 = &r.Uraian3.String + } + + return json.Marshal(aux) +} + +// Helper methods untuk mendapatkan nilai yang aman +func (r *Retribusi) GetJenis() string { + if r.Jenis.Valid { + return r.Jenis.String + } + return "" +} + +func (r *Retribusi) GetDinas() string { + if r.Dinas.Valid { + return r.Dinas.String + } + return "" +} + +func (r *Retribusi) GetTarif() string { + if r.Tarif.Valid { + return r.Tarif.String + } + return "" +} + +// Response struct untuk GET by ID - diperbaiki struktur +type RetribusiGetByIDResponse struct { + Message string `json:"message"` + Data *Retribusi `json:"data"` +} + +// Request struct untuk create - dioptimalkan dengan validasi +type RetribusiCreateRequest struct { + Status string `json:"status" validate:"required,oneof=draft active inactive"` + Jenis *string `json:"jenis,omitempty" validate:"omitempty,min=1,max=255"` + Pelayanan *string `json:"pelayanan,omitempty" validate:"omitempty,min=1,max=255"` + Dinas *string `json:"dinas,omitempty" validate:"omitempty,min=1,max=255"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" validate:"omitempty,min=1,max=255"` + KodeTarif *string `json:"kode_tarif,omitempty" validate:"omitempty,min=1,max=255"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + Tarif *string `json:"tarif,omitempty" validate:"omitempty,numeric"` + Satuan 
*string `json:"satuan,omitempty" validate:"omitempty,min=1,max=255"` + TarifOvertime *string `json:"tarif_overtime,omitempty" validate:"omitempty,numeric"` + SatuanOvertime *string `json:"satuan_overtime,omitempty" validate:"omitempty,min=1,max=255"` + RekeningPokok *string `json:"rekening_pokok,omitempty" validate:"omitempty,min=1,max=255"` + RekeningDenda *string `json:"rekening_denda,omitempty" validate:"omitempty,min=1,max=255"` +} + +// Response struct untuk create +type RetribusiCreateResponse struct { + Message string `json:"message"` + Data *Retribusi `json:"data"` +} + +// Update request - sama seperti create tapi dengan ID +type RetribusiUpdateRequest struct { + ID string `json:"-" validate:"required,uuid4"` // ID dari URL path + Status string `json:"status" validate:"required,oneof=draft active inactive"` + Jenis *string `json:"jenis,omitempty" validate:"omitempty,min=1,max=255"` + Pelayanan *string `json:"pelayanan,omitempty" validate:"omitempty,min=1,max=255"` + Dinas *string `json:"dinas,omitempty" validate:"omitempty,min=1,max=255"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" validate:"omitempty,min=1,max=255"` + KodeTarif *string `json:"kode_tarif,omitempty" validate:"omitempty,min=1,max=255"` + Uraian1 *string `json:"uraian_1,omitempty"` + Uraian2 *string `json:"uraian_2,omitempty"` + Uraian3 *string `json:"uraian_3,omitempty"` + Tarif *string `json:"tarif,omitempty" validate:"omitempty,numeric"` + Satuan *string `json:"satuan,omitempty" validate:"omitempty,min=1,max=255"` + TarifOvertime *string `json:"tarif_overtime,omitempty" validate:"omitempty,numeric"` + SatuanOvertime *string `json:"satuan_overtime,omitempty" validate:"omitempty,min=1,max=255"` + RekeningPokok *string `json:"rekening_pokok,omitempty" validate:"omitempty,min=1,max=255"` + RekeningDenda *string `json:"rekening_denda,omitempty" validate:"omitempty,min=1,max=255"` +} + +// Response struct untuk update +type RetribusiUpdateResponse struct { + Message string 
`json:"message"` + Data *Retribusi `json:"data"` +} + +// Response struct untuk delete +type RetribusiDeleteResponse struct { + Message string `json:"message"` + ID string `json:"id"` +} + +// Enhanced GET response dengan pagination dan aggregation +type RetribusiGetResponse struct { + Message string `json:"message"` + Data []Retribusi `json:"data"` + Meta models.MetaResponse `json:"meta"` + Summary *models.AggregateData `json:"summary,omitempty"` +} + +// Filter struct untuk query parameters +type RetribusiFilter struct { + Status *string `json:"status,omitempty" form:"status"` + Jenis *string `json:"jenis,omitempty" form:"jenis"` + Dinas *string `json:"dinas,omitempty" form:"dinas"` + KelompokObyek *string `json:"kelompok_obyek,omitempty" form:"kelompok_obyek"` + Search *string `json:"search,omitempty" form:"search"` + DateFrom *time.Time `json:"date_from,omitempty" form:"date_from"` + DateTo *time.Time `json:"date_to,omitempty" form:"date_to"` +} diff --git a/internal/models/validation.go b/internal/models/validation.go new file mode 100644 index 0000000..1462d35 --- /dev/null +++ b/internal/models/validation.go @@ -0,0 +1,106 @@ +package models + +import ( + "regexp" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) + +// CustomValidator wraps the validator +type CustomValidator struct { + Validator *validator.Validate +} + +// Validate validates struct +func (cv *CustomValidator) Validate(i interface{}) error { + return cv.Validator.Struct(i) +} + +// RegisterCustomValidations registers custom validation rules +func RegisterCustomValidations(v *validator.Validate) { + // Validate Indonesian phone number + v.RegisterValidation("indonesian_phone", validateIndonesianPhone) + + // Validate BPJS card number format + v.RegisterValidation("bpjs_card", validateBPJSCard) + + // Validate Indonesian NIK + v.RegisterValidation("indonesian_nik", validateIndonesianNIK) + + // Validate date format YYYY-MM-DD + v.RegisterValidation("date_format", 
validateDateFormat) + + // Validate ICD-10 code format + v.RegisterValidation("icd10", validateICD10) + + // Validate ICD-9-CM procedure code + v.RegisterValidation("icd9cm", validateICD9CM) +} + +func validateIndonesianPhone(fl validator.FieldLevel) bool { + phone := fl.Field().String() + if phone == "" { + return true // Optional field + } + + // Indonesian phone number pattern: +62, 62, 08, or 8 + pattern := `^(\+?62|0?8)[1-9][0-9]{7,11}$` + matched, _ := regexp.MatchString(pattern, phone) + return matched +} + +func validateBPJSCard(fl validator.FieldLevel) bool { + card := fl.Field().String() + if len(card) != 13 { + return false + } + + // BPJS card should be numeric + pattern := `^\d{13}$` + matched, _ := regexp.MatchString(pattern, card) + return matched +} + +func validateIndonesianNIK(fl validator.FieldLevel) bool { + nik := fl.Field().String() + if len(nik) != 16 { + return false + } + + // NIK should be numeric + pattern := `^\d{16}$` + matched, _ := regexp.MatchString(pattern, nik) + return matched +} + +func validateDateFormat(fl validator.FieldLevel) bool { + dateStr := fl.Field().String() + _, err := time.Parse("2006-01-02", dateStr) + return err == nil +} + +func validateICD10(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-10 pattern: Letter followed by 2 digits, optional dot and more digits + pattern := `^[A-Z]\d{2}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, strings.ToUpper(code)) + return matched +} + +func validateICD9CM(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-9-CM procedure pattern: 2-4 digits with optional decimal + pattern := `^\d{2,4}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, code) + return matched +} diff --git a/internal/models/vclaim/peserta/peserta.go b/internal/models/vclaim/peserta/peserta.go new file mode 100644 index 0000000..8c871fb --- /dev/null +++ 
b/internal/models/vclaim/peserta/peserta.go @@ -0,0 +1,71 @@ +package peserta + +import "api-service/internal/models" + +// === PESERTA MODELS === + +// PesertaRequest represents peserta lookup request +type PesertaRequest struct { + models.BaseRequest + NoKartu string `json:"nokartu" validate:"required,min=13,max=13"` + NIK string `json:"nik,omitempty" validate:"omitempty,min=16,max=16"` + TanggalSEP string `json:"tglsep" validate:"required" example:"2024-01-15"` + NoTelepon string `json:"notelp,omitempty" validate:"omitempty,max=15"` +} + +// PesertaData represents peserta information from BPJS +type PesertaData struct { + NoKartu string `json:"noKartu"` + NIK string `json:"nik"` + Nama string `json:"nama"` + Pisa string `json:"pisa"` + Sex string `json:"sex"` + TanggalLahir string `json:"tglLahir"` + TglCetakKartu string `json:"tglCetakKartu"` + TglTAT string `json:"tglTAT"` + TglTMT string `json:"tglTMT"` + StatusPeserta struct { + Kode string `json:"kode"` + Keterangan string `json:"keterangan"` + } `json:"statusPeserta"` + ProvUmum struct { + KdProvider string `json:"kdProvider"` + NmProvider string `json:"nmProvider"` + } `json:"provUmum"` + JenisPeserta struct { + Kode string `json:"kode"` + Keterangan string `json:"keterangan"` + } `json:"jenisPeserta"` + HakKelas struct { + Kode string `json:"kode"` + Keterangan string `json:"keterangan"` + } `json:"hakKelas"` + Umur struct { + UmurSekarang string `json:"umurSekarang"` + UmurSaatPelayanan string `json:"umurSaatPelayanan"` + } `json:"umur"` + Informasi struct { + Dinsos interface{} `json:"dinsos"` + ProlanisPRB string `json:"prolanisPRB"` + NoSKTM interface{} `json:"noSKTM"` + ESEP interface{} `json:"eSEP"` + } `json:"informasi"` + Cob struct { + NoAsuransi interface{} `json:"noAsuransi"` + NmAsuransi interface{} `json:"nmAsuransi"` + TglTMT interface{} `json:"tglTMT"` + TglTAT interface{} `json:"tglTAT"` + } `json:"cob"` + MR struct { + NoMR string `json:"noMR"` + NoTelepon string `json:"noTelepon"` + } 
`json:"mr,omitempty"` + RawResponse string `json:"raw_response,omitempty"` +} + +// PesertaResponse represents peserta API response +type PesertaResponse struct { + models.BaseResponse + Data *PesertaData `json:"data,omitempty"` + MetaData interface{} `json:"metaData,omitempty"` +} diff --git a/internal/routes/v1/routes.go b/internal/routes/v1/routes.go new file mode 100644 index 0000000..a68c411 --- /dev/null +++ b/internal/routes/v1/routes.go @@ -0,0 +1,172 @@ +package v1 + +import ( + "api-service/internal/config" + "api-service/internal/database" + authHandlers "api-service/internal/handlers/auth" + healthcheckHandlers "api-service/internal/handlers/healthcheck" + pesertaHandlers "api-service/internal/handlers/peserta" + retribusiHandlers "api-service/internal/handlers/retribusi" + "api-service/internal/middleware" + services "api-service/internal/services/auth" + "api-service/pkg/logger" + "time" + + "github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + swaggerFiles "github.com/swaggo/files" + ginSwagger "github.com/swaggo/gin-swagger" +) + +func RegisterRoutes(cfg *config.Config) *gin.Engine { + router := gin.New() + + // Initialize auth middleware configuration + middleware.InitializeAuth(cfg) + + // Add global middleware + router.Use(middleware.CORSConfig()) + router.Use(middleware.ErrorHandler()) + router.Use(logger.RequestLoggerMiddleware(logger.Default())) + router.Use(gin.Recovery()) + + // Initialize services with error handling + authService := services.NewAuthService(cfg) + if authService == nil { + logger.Fatal("Failed to initialize auth service") + } + + // Initialize database service + dbService := database.New(cfg) + + // ============================================================================= + // HEALTH CHECK & SYSTEM ROUTES + // ============================================================================= + + healthCheckHandler := healthcheckHandlers.NewHealthCheckHandler(dbService) + sistem := 
router.Group("/api/sistem") + { + sistem.GET("/health", healthCheckHandler.CheckHealth) + sistem.GET("/databases", func(c *gin.Context) { + c.JSON(200, gin.H{ + "databases": dbService.ListDBs(), + "health": dbService.Health(), + "timestamp": time.Now().Unix(), + }) + }) + sistem.GET("/info", func(c *gin.Context) { + c.JSON(200, gin.H{ + "service": "API Service v1.0.0", + "websocket_active": true, + "databases": dbService.ListDBs(), + "timestamp": time.Now().Unix(), + }) + }) + } + + // ============================================================================= + // SWAGGER DOCUMENTATION + // ============================================================================= + + router.GET("/swagger/*any", ginSwagger.WrapHandler( + swaggerFiles.Handler, + ginSwagger.DefaultModelsExpandDepth(-1), + ginSwagger.DeepLinking(true), + )) + + // ============================================================================= + // WEBSOCKET TEST CLIENT + // ============================================================================= + + // router.GET("/websocket-test", func(c *gin.Context) { + // c.Header("Content-Type", "text/html") + // c.String(http.StatusOK, getWebSocketTestHTML()) + // }) + + // ============================================================================= + // API v1 GROUP + // ============================================================================= + + v1 := router.Group("/api/v1") + + // ============================================================================= + // PUBLIC ROUTES (No Authentication Required) + // ============================================================================= + + // Authentication routes + authHandler := authHandlers.NewAuthHandler(authService) + tokenHandler := authHandlers.NewTokenHandler(authService) + + // Basic auth routes + v1.POST("/auth/login", authHandler.Login) + v1.POST("/auth/register", authHandler.Register) + v1.POST("/auth/refresh", authHandler.RefreshToken) + + // Token generation routes + 
v1.POST("/token/generate", tokenHandler.GenerateToken) + v1.POST("/token/generate-direct", tokenHandler.GenerateTokenDirect) + + // ============================================================================= + // PUBLISHED ROUTES + // ============================================================================= + + // Participant eligibility information (peserta) routes + pesertaHandler := pesertaHandlers.NewPesertaHandler(pesertaHandlers.PesertaHandlerConfig{ + Config: cfg, + Logger: *logger.Default(), + Validator: validator.New(), + }) + pesertaGroup := v1.Group("/Peserta") + pesertaGroup.GET("/nokartu/:nokartu", pesertaHandler.GetBynokartu) + pesertaGroup.GET("/nik/:nik", pesertaHandler.GetBynik) + + // Retribusi endpoints with WebSocket notifications + retribusiHandler := retribusiHandlers.NewRetribusiHandler() + retribusiGroup := v1.Group("/retribusi") + { + retribusiGroup.GET("", retribusiHandler.GetRetribusi) + retribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) + retribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) + retribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) + + // POST/PUT/DELETE with automatic WebSocket notifications + retribusiGroup.POST("", func(c *gin.Context) { + retribusiHandler.CreateRetribusi(c) + }) + + retribusiGroup.PUT("/id/:id", func(c *gin.Context) { + retribusiHandler.UpdateRetribusi(c) + }) + + retribusiGroup.DELETE("/id/:id", func(c *gin.Context) { + retribusiHandler.DeleteRetribusi(c) + }) + } + + // ============================================================================= + // PROTECTED ROUTES (Authentication Required) + // ============================================================================= + + protected := v1.Group("/") + protected.Use(middleware.ConfigurableAuthMiddleware(cfg)) + // Protected retribusi endpoints (Authentication Required) + protectedRetribusiGroup := protected.Group("/retribusi") + { + protectedRetribusiGroup.GET("", 
retribusiHandler.GetRetribusi) + protectedRetribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) + protectedRetribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) + protectedRetribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) + protectedRetribusiGroup.POST("", func(c *gin.Context) { + retribusiHandler.CreateRetribusi(c) + }) + + protectedRetribusiGroup.PUT("/id/:id", func(c *gin.Context) { + retribusiHandler.UpdateRetribusi(c) + }) + + protectedRetribusiGroup.DELETE("/id/:id", func(c *gin.Context) { + retribusiHandler.DeleteRetribusi(c) + }) + } + return router +} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 0000000..98ef90c --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,53 @@ +package server + +import ( + "fmt" + "net/http" + "os" + "strconv" + "time" + + _ "github.com/joho/godotenv/autoload" + + "api-service/internal/config" + "api-service/internal/database" + v1 "api-service/internal/routes/v1" +) + +var dbService database.Service // Global variable to hold the database service instance + +type Server struct { + port int + db database.Service +} + +func NewServer() *http.Server { + // Load configuration + cfg := config.LoadConfig() + cfg.Validate() + + port, _ := strconv.Atoi(os.Getenv("PORT")) + if port == 0 { + port = cfg.Server.Port + } + + if dbService == nil { // Check if the database service is already initialized + dbService = database.New(cfg) // Initialize only once + } + + NewServer := &Server{ + port: port, + db: dbService, // Use the global database service instance + } + + // Declare Server config + server := &http.Server{ + Addr: fmt.Sprintf(":%d", NewServer.port), + Handler: v1.RegisterRoutes(cfg), + IdleTimeout: time.Minute, + ReadTimeout: 10 * time.Second, + WriteTimeout: 30 * time.Second, + } + + return server +} diff --git a/internal/services/auth/auth.go b/internal/services/auth/auth.go new file mode 100644 index 0000000..d76aadb --- 
/dev/null +++ b/internal/services/auth/auth.go @@ -0,0 +1,169 @@ +package services + +import ( + "api-service/internal/config" + models "api-service/internal/models/auth" + "errors" + "time" + + "github.com/golang-jwt/jwt/v5" + "golang.org/x/crypto/bcrypt" +) + +// AuthService handles authentication logic +type AuthService struct { + config *config.Config + users map[string]*models.User // In-memory user store for demo +} + +// NewAuthService creates a new authentication service +func NewAuthService(cfg *config.Config) *AuthService { + // Initialize with demo users + users := make(map[string]*models.User) + + // Add demo users + users["admin"] = &models.User{ + ID: "1", + Username: "admin", + Email: "admin@example.com", + Password: "$2a$10$92IXUNpkjO0rOQ5byMi.Ye4oKoEa3Ro9llC/.og/at2.uheWG/igi", // password + Role: "admin", + } + + users["user"] = &models.User{ + ID: "2", + Username: "user", + Email: "user@example.com", + Password: "$2a$10$92IXUNpkjO0rOQ5byMi.Ye4oKoEa3Ro9llC/.og/at2.uheWG/igi", // password + Role: "user", + } + + return &AuthService{ + config: cfg, + users: users, + } +} + +// Login authenticates user and generates JWT token +func (s *AuthService) Login(username, password string) (*models.TokenResponse, error) { + user, exists := s.users[username] + if !exists { + return nil, errors.New("invalid credentials") + } + + // Verify password + err := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(password)) + if err != nil { + return nil, errors.New("invalid credentials") + } + + // Generate JWT token + token, err := s.generateToken(user) + if err != nil { + return nil, err + } + + return &models.TokenResponse{ + AccessToken: token, + TokenType: "Bearer", + ExpiresIn: 3600, // 1 hour + }, nil +} + +// generateToken creates a new JWT token for the user +func (s *AuthService) generateToken(user *models.User) (string, error) { + // Create claims + claims := jwt.MapClaims{ + "user_id": user.ID, + "username": user.Username, + "email": user.Email, 
+	"role":     user.Role,
+		"exp":      time.Now().Add(time.Hour * 1).Unix(),
+		"iat":      time.Now().Unix(),
+	}
+
+	// Create token
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+	// Sign token with secret key
+	secretKey := []byte(s.getJWTSecret())
+	return token.SignedString(secretKey)
+}
+
+// GenerateTokenForUser generates a JWT token for a specific user
+func (s *AuthService) GenerateTokenForUser(user *models.User) (string, error) {
+	// Create claims
+	claims := jwt.MapClaims{
+		"user_id":  user.ID,
+		"username": user.Username,
+		"email":    user.Email,
+		"role":     user.Role,
+		"exp":      time.Now().Add(time.Hour * 1).Unix(),
+		"iat":      time.Now().Unix(),
+	}
+
+	// Create token
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+	// Sign token with secret key
+	secretKey := []byte(s.getJWTSecret())
+	return token.SignedString(secretKey)
+}
+
+// ValidateToken validates the JWT token signature and expiry and returns
+// the claims this service issues (user_id, username, email, role).
+func (s *AuthService) ValidateToken(tokenString string) (*models.JWTClaims, error) {
+	token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) {
+		// Only accept HMAC tokens — rejects e.g. an attacker-supplied "none"
+		// or RSA-signed token verified against the HMAC secret.
+		if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
+			return nil, errors.New("unexpected signing method")
+		}
+		return []byte(s.getJWTSecret()), nil
+	})
+
+	if err != nil {
+		return nil, err
+	}
+
+	if !token.Valid {
+		return nil, errors.New("invalid token")
+	}
+
+	claims, ok := token.Claims.(jwt.MapClaims)
+	if !ok {
+		return nil, errors.New("invalid claims")
+	}
+
+	// BUG FIX: the original used unchecked type assertions such as
+	// claims["user_id"].(string), which panic when a claim is absent or not a
+	// string (any syntactically valid token signed with the same secret could
+	// crash the request). Comma-ok assertions degrade to "" instead.
+	str := func(key string) string {
+		v, _ := claims[key].(string)
+		return v
+	}
+
+	return &models.JWTClaims{
+		UserID:   str("user_id"),
+		Username: str("username"),
+		Email:    str("email"),
+		Role:     str("role"),
+	}, nil
+}
+
+// getJWTSecret returns the JWT secret key
+func (s *AuthService) getJWTSecret() string {
+	// In production, this should come from environment variables
+	return "your-secret-key-change-this-in-production"
+}
+
+// RegisterUser registers a new user (for demo purposes)
+func (s *AuthService) RegisterUser(username, email, password, role string) error
{ + if _, exists := s.users[username]; exists { + return errors.New("username already exists") + } + + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + if err != nil { + return err + } + + s.users[username] = &models.User{ + ID: string(rune(len(s.users) + 1)), + Username: username, + Email: email, + Password: string(hashedPassword), + Role: role, + } + + return nil +} diff --git a/internal/services/bpjs/response.go b/internal/services/bpjs/response.go new file mode 100644 index 0000000..97ce46c --- /dev/null +++ b/internal/services/bpjs/response.go @@ -0,0 +1,1071 @@ +package services + +import ( + helper "api-service/internal/helpers/bpjs" + "bytes" + "compress/gzip" + "crypto/aes" + "crypto/cipher" + "crypto/md5" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "strings" + "unicode" + "unicode/utf16" + "unicode/utf8" + + lzstring "github.com/daku10/go-lz-string" +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// GenerateBPJSKey - Generate key sesuai standar BPJS VClaim +func GenerateBPJSKey(consumerID, timestamp, userKey string) string { + // Format: consumerID + "&" + timestamp + "&" + userKey + keyString := fmt.Sprintf("%s&%s&%s", consumerID, timestamp, userKey) + + // BPJS biasanya menggunakan key dengan length 32 bytes + if len(keyString) > 32 { + return keyString[:32] + } + + // Pad dengan spasi jika kurang dari 32 + for len(keyString) < 32 { + keyString += " " + } + + log.Printf("GenerateBPJSKey: Generated key length: %d", len(keyString)) + return keyString +} + +// ResponseVclaim decrypts the encrypted response from VClaim API + +func ResponseVclaim(encrypted string, key string) (string, error) { + log.Println("ResponseVclaim: Starting decryption process") + log.Printf("ResponseVclaim: Encrypted string length: %d", len(encrypted)) + + // Pad the base64 string if needed + if len(encrypted)%4 != 0 { + padding := (4 - len(encrypted)%4) % 4 + for i 
:= 0; i < padding; i++ { + encrypted += "=" + } + } + + // Decode base64 + cipherText, err := base64.StdEncoding.DecodeString(encrypted) + if err != nil { + log.Printf("ResponseVclaim: Failed to decode base64: %v", err) + return "", err + } + + if len(cipherText) < aes.BlockSize { + return "", errors.New("cipherText too short") + } + + // Try multiple key generation and decryption methods + keyMethods := []func(string) ([]byte, error){ + // Method 1: SHA256 hash of key + func(k string) ([]byte, error) { + hash := sha256.Sum256([]byte(k)) + return hash[:], nil + }, + // Method 2: Key as-is (padded/truncated to 32 bytes) + func(k string) ([]byte, error) { + keyBytes := []byte(k) + if len(keyBytes) < 32 { + // Pad with zeros + padded := make([]byte, 32) + copy(padded, keyBytes) + return padded, nil + } + return keyBytes[:32], nil + }, + // Method 3: MD5 hash repeated to make 32 bytes + func(k string) ([]byte, error) { + hash := md5.Sum([]byte(k)) + key32 := make([]byte, 32) + copy(key32[:16], hash[:]) + copy(key32[16:], hash[:]) + return key32, nil + }, + } + + for keyIdx, keyMethod := range keyMethods { + keyBytes, err := keyMethod(key) + if err != nil { + continue + } + + block, err := aes.NewCipher(keyBytes) + if err != nil { + continue + } + + // Try different IV methods for each key method + ivMethods := []func([]byte, []byte) (string, error){ + // IV from key hash + func(ct, kb []byte) (string, error) { + hash := sha256.Sum256(kb) + return tryDecryptWithCustomIV(ct, block, hash[:aes.BlockSize]) + }, + // IV from ciphertext + func(ct, kb []byte) (string, error) { + return tryDecryptWithCipherIV(ct, block) + }, + // Zero IV + func(ct, kb []byte) (string, error) { + iv := make([]byte, aes.BlockSize) + return tryDecryptWithCustomIV(ct, block, iv) + }, + // IV from key directly + func(ct, kb []byte) (string, error) { + iv := make([]byte, aes.BlockSize) + copy(iv, kb[:aes.BlockSize]) + return tryDecryptWithCustomIV(ct, block, iv) + }, + } + + for ivIdx, ivMethod := 
range ivMethods { + if result, err := ivMethod(cipherText, keyBytes); err == nil { + log.Printf("ResponseVclaim: Success with key method %d, IV method %d", keyIdx+1, ivIdx+1) + log.Printf("ResponseVclaim result preview: %s", result[:min(200, len(result))]) + return result, nil + } else { + log.Printf("ResponseVclaim: Key method %d, IV method %d failed: %v", keyIdx+1, ivIdx+1, err) + } + } + } + + return "", errors.New("all decryption methods failed") +} + +// func ResponseVclaim(encrypted string, key string) (string, error) { +// log.Println("ResponseVclaim: Starting decryption process") +// log.Printf("ResponseVclaim: Encrypted string length: %d", len(encrypted)) + +// // Pad the base64 string if needed +// if len(encrypted)%4 != 0 { +// padding := (4 - len(encrypted)%4) % 4 +// for i := 0; i < padding; i++ { +// encrypted += "=" +// } +// } + +// // Decode base64 +// cipherText, err := base64.StdEncoding.DecodeString(encrypted) +// if err != nil { +// log.Printf("ResponseVclaim: Failed to decode base64: %v", err) +// return "", err +// } + +// if len(cipherText) < aes.BlockSize { +// return "", errors.New("cipherText too short") +// } + +// // Create AES cipher +// hash := sha256.Sum256([]byte(key)) +// block, err := aes.NewCipher(hash[:]) +// if err != nil { +// return "", err +// } + +// // Try multiple decryption methods +// methods := []func([]byte, cipher.Block, []byte) (string, error){ +// // Method 1: IV from hash (current approach) +// func(ct []byte, b cipher.Block, h []byte) (string, error) { +// return tryDecryptWithHashIV(ct, b, h[:aes.BlockSize]) +// }, +// // Method 2: IV from cipherText (standard approach) +// func(ct []byte, b cipher.Block, h []byte) (string, error) { +// return tryDecryptWithCipherIV(ct, b) +// }, +// // Method 3: Try without padding removal +// func(ct []byte, b cipher.Block, h []byte) (string, error) { +// return tryDecryptWithoutPaddingRemoval(ct, b, h[:aes.BlockSize]) +// }, +// } + +// for i, method := range methods { +// if 
result, err := method(cipherText, block, hash[:]); err == nil { +// log.Printf("ResponseVclaim: Success with method %d", i+1) +// log.Printf("ResponseVclaim result: %s", result[:min(100, len(result))]) +// return result, nil +// } else { +// log.Printf("ResponseVclaim: Method %d failed: %v", i+1, err) +// } +// } + +// return "", errors.New("all decryption methods failed") +// } + +// func ResponseVclaim(encrypted string, key string) (string, error) { +// log.Println("ResponseVclaim: Starting decryption process") +// log.Printf("ResponseVclaim: Encrypted string length: %d", len(encrypted)) + +// // Pad the base64 string if needed +// if len(encrypted)%4 != 0 { +// padding := (4 - len(encrypted)%4) % 4 +// for i := 0; i < padding; i++ { +// encrypted += "=" +// } +// } + +// // Decode base64 +// cipherText, err := base64.StdEncoding.DecodeString(encrypted) +// if err != nil { +// log.Printf("ResponseVclaim: Failed to decode base64: %v", err) +// return "", err +// } + +// if len(cipherText) < aes.BlockSize { +// return "", errors.New("cipherText too short") +// } + +// // Create AES cipher +// hash := sha256.Sum256([]byte(key)) +// block, err := aes.NewCipher(hash[:]) +// if err != nil { +// return "", err +// } + +// // Try both IV methods +// // Method 1: IV from hash (current approach) +// if result, err := tryDecryptWithHashIV(cipherText, block, hash[:aes.BlockSize]); err == nil { +// log.Printf("ResponseVclaim: Success with hash IV method") +// log.Println("ResponseVclaim: ", result) +// return result, nil +// } + +// // Method 2: IV from cipherText (standard approach) +// if result, err := tryDecryptWithCipherIV(cipherText, block); err == nil { +// log.Printf("ResponseVclaim: Success with cipher IV method") +// return result, nil +// } + +// return "", errors.New("all decryption methods failed") +// } + +// func tryDecryptWithHashIV(cipherText []byte, block cipher.Block, iv []byte) (string, error) { +// if len(cipherText)%aes.BlockSize != 0 { +// return "", 
errors.New("cipherText is not a multiple of the block size") +// } + +// mode := cipher.NewCBCDecrypter(block, iv) +// decrypted := make([]byte, len(cipherText)) +// mode.CryptBlocks(decrypted, cipherText) + +// // Remove PKCS7 padding +// decrypted = helper.RemovePKCS7Padding(decrypted) +// log.Printf("tryDecryptWithHashIV: Decryption completed, length: %d", len(decrypted)) + +// return tryAllDecompressionMethods(decrypted) +// } +func tryDecryptWithHashIV(cipherText []byte, block cipher.Block, iv []byte) (string, error) { + if len(cipherText)%aes.BlockSize != 0 { + return "", errors.New("cipherText is not a multiple of the block size") + } + + mode := cipher.NewCBCDecrypter(block, iv) + decrypted := make([]byte, len(cipherText)) + mode.CryptBlocks(decrypted, cipherText) + + // Log raw decrypted data before padding removal + log.Printf("tryDecryptWithHashIV: Raw decrypted length: %d", len(decrypted)) + + // Remove PKCS7 padding + decrypted = helper.RemovePKCS7Padding(decrypted) + log.Printf("tryDecryptWithHashIV: After padding removal, length: %d", len(decrypted)) + + // Log first 50 bytes untuk debugging + logLen := min(50, len(decrypted)) + log.Printf("tryDecryptWithHashIV: First %d bytes: %q", logLen, string(decrypted[:logLen])) + + return tryAllDecompressionMethods(decrypted) +} + +// func tryDecryptWithCipherIV(cipherText []byte, block cipher.Block) (string, error) { +// if len(cipherText) < aes.BlockSize { +// return "", errors.New("cipherText too short for IV extraction") +// } + +// // Extract IV from first block +// iv := cipherText[:aes.BlockSize] +// cipherData := cipherText[aes.BlockSize:] + +// if len(cipherData)%aes.BlockSize != 0 { +// return "", errors.New("cipher data is not a multiple of the block size") +// } + +// mode := cipher.NewCBCDecrypter(block, iv) +// decrypted := make([]byte, len(cipherData)) +// mode.CryptBlocks(decrypted, cipherData) + +// // Remove PKCS7 padding +// decrypted = helper.RemovePKCS7Padding(decrypted) +// 
log.Printf("tryDecryptWithCipherIV: Decryption completed, length: %d", len(decrypted)) + +// return tryAllDecompressionMethods(decrypted) +// } +func tryDecryptWithCipherIV(cipherText []byte, block cipher.Block) (string, error) { + if len(cipherText) < aes.BlockSize { + return "", errors.New("cipherText too short for IV extraction") + } + + // Extract IV from first block + iv := cipherText[:aes.BlockSize] + cipherData := cipherText[aes.BlockSize:] + + if len(cipherData)%aes.BlockSize != 0 { + return "", errors.New("cipher data is not a multiple of the block size") + } + + mode := cipher.NewCBCDecrypter(block, iv) + decrypted := make([]byte, len(cipherData)) + mode.CryptBlocks(decrypted, cipherData) + + // Log raw decrypted data before padding removal + log.Printf("tryDecryptWithCipherIV: Raw decrypted length: %d", len(decrypted)) + + // Remove PKCS7 padding + decrypted = helper.RemovePKCS7Padding(decrypted) + log.Printf("tryDecryptWithCipherIV: After padding removal, length: %d", len(decrypted)) + + // Log first 50 bytes untuk debugging + logLen := min(50, len(decrypted)) + log.Printf("tryDecryptWithCipherIV: First %d bytes: %q", logLen, string(decrypted[:logLen])) + + return tryAllDecompressionMethods(decrypted) +} +func tryAllDecompressionMethods(data []byte) (string, error) { + log.Printf("tryAllDecompressionMethods: Attempting decompression, data length: %d", len(data)) + + // Log hex dump for better debugging + hexDump := make([]string, min(32, len(data))) + for i := 0; i < len(hexDump); i++ { + hexDump[i] = fmt.Sprintf("%02x", data[i]) + } + log.Printf("tryAllDecompressionMethods: Hex dump (first 32 bytes): %s", strings.Join(hexDump, " ")) + + // Method 1: Try LZ-string first (most common for BPJS) + if result, err := tryLZStringMethods(data); err == nil { + log.Println("tryAllDecompressionMethods: LZ-string decompression successful") + return result, nil + } + + // Method 2: Try gzip + if result, err := tryGzipDecompression(data); err == nil && 
isValidDecompressedResult(result) { + log.Println("tryAllDecompressionMethods: Gzip decompression successful") + return result, nil + } + + // Method 3: Try as plain text + if isValidUTF8AndPrintable(string(data)) { + result := string(data) + if isValidDecompressedResult(result) { + log.Println("tryAllDecompressionMethods: Data is already valid text") + return result, nil + } + } + + // Method 4: Try base64 decode then decompress + if result, err := tryBase64ThenDecompress(data); err == nil { + log.Println("tryAllDecompressionMethods: Base64 then decompress successful") + return result, nil + } + + log.Printf("tryAllDecompressionMethods: All methods failed") + return "", errors.New("all decompression methods failed") +} + +// func tryAllDecompressionMethods(data []byte) (string, error) { +// log.Printf("tryAllDecompressionMethods: Attempting decompression, data length: %d", len(data)) + +// // Method 1: Try LZ-string decompression FIRST (paling umum untuk BPJS API) +// if result, err := tryLZStringMethods(data); err == nil { +// log.Println("tryAllDecompressionMethods: LZ-string decompression successful") +// return result, nil +// } + +// // Method 2: Try gzip decompression +// if result, err := tryGzipDecompression(data); err == nil && isValidDecompressedResult(result) { +// log.Println("tryAllDecompressionMethods: Gzip decompression successful") +// return result, nil +// } + +// // Method 3: Check if it's already valid JSON/text (SETELAH mencoba decompression) +// if isValidUTF8AndPrintable(string(data)) { +// result := string(data) +// if isValidDecompressedResult(result) { +// log.Println("tryAllDecompressionMethods: Data is already valid, returning as-is") +// return result, nil +// } +// } + +// // Method 4: Log the raw data for debugging +// log.Printf("tryAllDecompressionMethods: All methods failed. 
Raw data (first 100 bytes): %q", string(data[:min(100, len(data))])) + +// return "", errors.New("all decompression methods failed") +// } +// func tryLZStringMethods(data []byte) (string, error) { +// dataStr := string(data) + +// log.Printf("tryLZStringMethods: Attempting LZ-string decompression on data: %s", dataStr[:min(50, len(dataStr))]) + +// // Method 1: DecompressFromEncodedURIComponent (paling umum) +// if result, err := lzstring.DecompressFromEncodedURIComponent(dataStr); err == nil && len(result) > 0 { +// log.Printf("LZ-string DecompressFromEncodedURIComponent attempted, result length: %d", len(result)) +// if isValidDecompressedResult(result) { +// log.Printf("LZ-string DecompressFromEncodedURIComponent successful and valid") +// return result, nil +// } else { +// log.Printf("LZ-string DecompressFromEncodedURIComponent result not valid: %s", result[:min(100, len(result))]) +// } +// } + +// // Method 2: DecompressFromBase64 +// if result, err := lzstring.DecompressFromBase64(dataStr); err == nil && len(result) > 0 { +// log.Printf("LZ-string DecompressFromBase64 attempted, result length: %d", len(result)) +// if isValidDecompressedResult(result) { +// log.Printf("LZ-string DecompressFromBase64 successful and valid") +// return result, nil +// } else { +// log.Printf("LZ-string DecompressFromBase64 result not valid: %s", result[:min(100, len(result))]) +// } +// } + +// // Method 3: Try base64 decode first, then decompress +// if decoded, err := base64.StdEncoding.DecodeString(dataStr); err == nil { +// if result, err := lzstring.DecompressFromEncodedURIComponent(string(decoded)); err == nil && len(result) > 0 { +// if isValidDecompressedResult(result) { +// log.Printf("LZ-string with base64 decode first successful and valid") +// return result, nil +// } +// } +// } + +// // Method 4: Try as raw bytes (convert each byte to uint16) +// if result, err := tryRawBytesToLZString(data); err == nil && len(result) > 0 { +// if 
isValidDecompressedResult(result) { +// log.Printf("LZ-string from raw bytes successful and valid") +// return result, nil +// } +// } + +// log.Printf("All LZ-string methods failed or returned invalid results") +// return "", errors.New("all LZ-string methods failed") +// } + +func tryLZStringMethods(data []byte) (string, error) { + dataStr := string(data) + log.Printf("tryLZStringMethods: Raw data length: %d", len(dataStr)) + + // Method 1: Bersihkan prefix corrupt dan cari pattern LZ-string + cleanedData := extractCleanLZString(dataStr) + if cleanedData != "" { + log.Printf("tryLZStringMethods: Found clean LZ-string: %s", cleanedData[:min(50, len(cleanedData))]) + + // Dekompresi sesuai standar BPJS + if result, err := lzstring.DecompressFromEncodedURIComponent(cleanedData); err == nil && len(result) > 0 { + if isValidDecompressedResult(result) { + log.Printf("LZ-string decompression successful, length: %d", len(result)) + return result, nil + } + } + } + + // Method 2: Fallback direct decompression + if result, err := lzstring.DecompressFromEncodedURIComponent(dataStr); err == nil && len(result) > 0 { + if isValidDecompressedResult(result) { + return result, nil + } + } + + return "", errors.New("LZ-string decompression failed") +} +func extractCleanLZString(data string) string { + // Pattern LZ-string umum dari dokumentasi BPJS + patterns := []string{"EAuUA", "N4Ig", "BwIw", "CwIw", "DwIw", "EwIw", "FwIw", "GwIw", "HwIw"} + + for _, pattern := range patterns { + if idx := strings.Index(data, pattern); idx >= 0 { + // Ekstrak dari pattern hingga akhir + candidate := data[idx:] + log.Printf("extractCleanLZString: Found pattern '%s' at position %d", pattern, idx) + + // Bersihkan hanya karakter base64 valid + cleaned := extractBase64Only(candidate) + if len(cleaned) > 100 { // Minimal length untuk data valid + return cleaned + } + } + } + + return "" +} + +// Fungsi untuk mengekstrak hanya karakter base64 valid +func extractBase64Only(s string) string { + 
base64Chars := "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + var result strings.Builder + + for _, char := range s { + if strings.ContainsRune(base64Chars, char) { + result.WriteRune(char) + } else { + // Stop di karakter non-base64 jika sudah cukup panjang + if result.Len() > 100 { + break + } + } + } + + return result.String() +} + +// Fungsi untuk mencari dan mengekstrak LZ-string dari data yang mungkin corrupt +func findAndExtractLZString(data string) string { + // Pattern LZ-string umum - prioritaskan yang paling umum + patterns := []string{ + "EAuUA", "N4Ig", "BwIw", "CwIw", "DwIw", "EwIw", + "FwIw", "GwIw", "HwIw", "BAuUA", "CAuUA", "DAuUA", "AAuUA", + } + + // Cari pattern yang paling awal dalam string + earliestIdx := -1 + bestPattern := "" + + for _, pattern := range patterns { + if idx := strings.Index(data, pattern); idx >= 0 { + if earliestIdx == -1 || idx < earliestIdx { + earliestIdx = idx + bestPattern = pattern + } + } + } + + if earliestIdx >= 0 { + // Ekstrak dari posisi pattern hingga akhir + candidate := data[earliestIdx:] + log.Printf("findAndExtractLZString: Found pattern '%s' at position %d", bestPattern, earliestIdx) + log.Printf("findAndExtractLZString: Extracted data: %s", candidate[:min(100, len(candidate))]) + + // Bersihkan dari karakter non-base64 di akhir + cleaned := cleanBase64String(candidate) + if len(cleaned) > 50 { + log.Printf("findAndExtractLZString: Cleaned data length: %d", len(cleaned)) + return cleaned + } + } + + // Fallback: cari sequence base64 terpanjang + return extractLongestBase64Sequence(data) +} + +// Fungsi untuk membersihkan string hingga hanya karakter base64 valid +func cleanBase64String(s string) string { + base64Chars := "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + var result strings.Builder + + for _, char := range s { + if strings.ContainsRune(base64Chars, char) { + result.WriteRune(char) + } else { + // Jika bertemu karakter non-base64 dan sudah cukup 
panjang, stop + if result.Len() > 100 { + break + } + // Skip karakter non-base64 di awal + } + } + + cleaned := result.String() + log.Printf("cleanBase64String: Original length: %d, Cleaned length: %d", len(s), len(cleaned)) + return cleaned +} + +// // Fungsi untuk membersihkan string hingga hanya karakter base64 valid +// func cleanBase64String(s string) string { +// base64Chars := "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" +// var result strings.Builder + +// for _, char := range s { +// if strings.ContainsRune(base64Chars, char) { +// result.WriteRune(char) +// } else if result.Len() > 50 { +// // Stop jika sudah cukup panjang dan bertemu karakter non-base64 +// break +// } +// } + +// return result.String() +// } + +// Fungsi untuk membersihkan karakter non-printable +func cleanNonPrintableChars(s string) string { + var result strings.Builder + for _, r := range s { + if r >= 32 && r <= 126 || r == '\n' || r == '\r' || r == '\t' { + result.WriteRune(r) + } + } + return result.String() +} + +// Fungsi baru untuk mengekstrak data LZ-string yang bersih dari data corrupt +func extractLZStringFromCorruptData(data string) string { + // Cari pattern LZ-string yang umum + patterns := []string{"N4Ig", "BwIw", "CwIw", "DwIw", "EwIw", "FwIw", "GwIw", "HwIw", "EAuUA"} + + for _, pattern := range patterns { + if idx := strings.Index(data, pattern); idx > 0 { + cleanData := data[idx:] + log.Printf("extractLZStringFromCorruptData: Found pattern '%s' at position %d", pattern, idx) + + // Validasi bahwa data setelah pattern adalah base64-like characters + if isBase64Like(cleanData) { + return cleanData + } + } + } + + // Jika tidak ada pattern yang ditemukan, coba cari sequences panjang dari base64 characters + return extractLongestBase64Sequence(data) +} + +// Fungsi untuk mengecek apakah string berisi karakter base64 +func isBase64Like(s string) bool { + if len(s) < 20 { + return false + } + + base64Chars := 
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + validCount := 0 + + for i, char := range s[:min(100, len(s))] { + if strings.ContainsRune(base64Chars, char) { + validCount++ + } + if i > 20 && float64(validCount)/float64(i+1) < 0.8 { + return false + } + } + + return float64(validCount)/float64(min(100, len(s))) >= 0.8 +} + +// Fungsi untuk mengekstrak sequence base64 terpanjang +func extractLongestBase64Sequence(data string) string { + base64Chars := "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + + bestStart := -1 + bestLength := 0 + currentStart := -1 + currentLength := 0 + + for i, char := range data { + if strings.ContainsRune(base64Chars, char) { + if currentStart == -1 { + currentStart = i + currentLength = 1 + } else { + currentLength++ + } + } else { + if currentLength > bestLength && currentLength > 50 { + bestStart = currentStart + bestLength = currentLength + } + currentStart = -1 + currentLength = 0 + } + } + + // Check final sequence + if currentLength > bestLength && currentLength > 50 { + bestStart = currentStart + bestLength = currentLength + } + + if bestStart >= 0 && bestLength > 50 { + result := data[bestStart : bestStart+bestLength] + log.Printf("extractLongestBase64Sequence: Found sequence at pos %d, length %d", bestStart, bestLength) + return result + } + + return "" +} + +// Fungsi untuk validasi hasil dekompresi +func isValidDecompressedResult(result string) bool { + if len(result) == 0 { + return false + } + + // Trim whitespace dan cek UTF-8 + trimmed := strings.TrimSpace(result) + if !utf8.ValidString(trimmed) { + return false + } + + // Harus dimulai dengan { atau [ untuk JSON + if len(trimmed) > 0 && (trimmed[0] == '{' || trimmed[0] == '[') { + // Validasi sebagai JSON + var js json.RawMessage + if json.Unmarshal([]byte(result), &js) == nil { + log.Printf("Decompressed result is valid JSON, length: %d", len(result)) + return true + } + } + + // Jika bukan JSON, tolak + 
log.Printf("Decompressed result is not valid JSON") + return false +} + +// func isValidDecompressedResult(result string) bool { +// if len(result) == 0 { +// return false +// } + +// // Check if result contains only printable ASCII and valid UTF-8 +// if !isValidUTF8AndPrintable(result) { +// log.Printf("Decompressed result contains invalid characters") +// return false +// } + +// // Check if it looks like JSON (starts with { or [) +// trimmed := strings.TrimSpace(result) +// if len(trimmed) > 0 && (trimmed[0] == '{' || trimmed[0] == '[') { +// // Try to validate as JSON +// var js json.RawMessage +// if json.Unmarshal([]byte(result), &js) == nil { +// log.Printf("Decompressed result is valid JSON") +// return true +// } +// } + +// // PERBAIKAN: Jangan anggap data yang dimulai dengan karakter tertentu sebagai valid text +// // Data LZ-string biasanya dimulai dengan karakter seperti N4Ig, BwIw, dll +// if detectLZStringPattern(result) { +// log.Printf("Data appears to be LZ-string compressed, needs decompression") +// return false +// } + +// // If not JSON, check if it's reasonable text content +// if len(result) > 10 && isReasonableTextContent(result) { +// log.Printf("Decompressed result appears to be valid text content") +// return true +// } + +// return false +// } + +// Fungsi baru untuk mendeteksi pola LZ-string +func detectLZStringPattern(s string) bool { + if len(s) < 10 { + return false + } + + // Pattern umum LZ-string compressed data + commonLZPatterns := []string{ + "N4Ig", "BwIw", "CwIw", "DwIw", "EwIw", "FwIw", "GwIw", "HwIw", + "IwIw", "JwIw", "KwIw", "LwIw", "MwIw", "NwIw", "OwIw", "PwIw", + } + + for _, pattern := range commonLZPatterns { + if strings.HasPrefix(s, pattern) { + return true + } + } + + // Cek apakah string hanya berisi karakter base64 tanpa spasi atau newline + base64Pattern := "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" + if len(s) > 50 { // Only check long strings + invalidChars := 0 + for _, char := 
range s { + if !strings.ContainsRune(base64Pattern, char) { + invalidChars++ + } + } + // Jika kurang dari 5% karakter invalid, kemungkinan ini LZ-string + if float64(invalidChars)/float64(len(s)) < 0.05 { + return true + } + } + + return false +} + +// func isValidUTF8AndPrintable(s string) bool { +// if !utf8.ValidString(s) { +// return false +// } + +// // Allow common characters: letters, numbers, spaces, and common JSON characters +// for _, r := range s { +// if r < 32 && r != '\n' && r != '\r' && r != '\t' { +// return false // Control characters (except newline, carriage return, tab) +// } +// if r > 126 && r < 160 { +// return false // Extended ASCII control characters +// } +// // Allow Unicode characters above 160 +// } +// return true +// } +func isValidUTF8AndPrintable(s string) bool { + if !utf8.ValidString(s) { + log.Printf("isValidUTF8AndPrintable: String is not valid UTF-8") + return false + } + + // Hitung karakter yang valid + validChars := 0 + totalChars := 0 + + for _, r := range s { + totalChars++ + + if r >= 32 && r <= 126 { // Printable ASCII + validChars++ + } else if r == '\n' || r == '\r' || r == '\t' { // Allowed control chars + validChars++ + } else if r >= 160 { // Unicode characters + validChars++ + } + } + + validRatio := float64(validChars) / float64(totalChars) + log.Printf("isValidUTF8AndPrintable: Valid chars ratio: %.2f (%d/%d)", validRatio, validChars, totalChars) + + // At least 70% should be valid characters + return validRatio >= 0.7 +} +func isReasonableTextContent(s string) bool { + // Count printable characters + printableCount := 0 + for _, r := range s { + if unicode.IsPrint(r) || unicode.IsSpace(r) { + printableCount++ + } + } + + // At least 80% should be printable + return float64(printableCount)/float64(len([]rune(s))) >= 0.8 +} + +// Perbaikan untuk konversi raw bytes ke LZ-string +func tryRawBytesToLZString(data []byte) (string, error) { + // Convert bytes to uint16 array (for UTF-16 decompression) + if 
len(data)%2 != 0 { + // Pad with zero if odd length + data = append(data, 0) + } + + utf16Data := make([]uint16, len(data)/2) + for i := 0; i < len(data); i += 2 { + utf16Data[i/2] = uint16(data[i]) | (uint16(data[i+1]) << 8) + } + + return lzstring.DecompressFromUTF16(utf16Data) +} +func isValidJSON(data []byte) bool { + if len(data) == 0 { + return false + } + firstChar := data[0] + return firstChar == '{' || firstChar == '[' +} + +func tryGzipDecompression(data []byte) (string, error) { + reader, err := gzip.NewReader(bytes.NewReader(data)) + if err != nil { + return "", err + } + defer reader.Close() + + decompressed, err := io.ReadAll(reader) + if err != nil { + return "", err + } + + return string(decompressed), nil +} + +// stringToUTF16 converts string to []uint16 for UTF16 decompression +func stringToUTF16(s string) ([]uint16, error) { + if len(s) == 0 { + return nil, errors.New("empty string") + } + + // Convert string to runes first + runes := []rune(s) + + // Convert runes to UTF16 + utf16Data := utf16.Encode(runes) + + return utf16Data, nil +} + +// Method dekripsi tanpa padding removal +func tryDecryptWithoutPaddingRemoval(cipherText []byte, block cipher.Block, iv []byte) (string, error) { + if len(cipherText)%aes.BlockSize != 0 { + return "", errors.New("cipherText is not a multiple of the block size") + } + + mode := cipher.NewCBCDecrypter(block, iv) + decrypted := make([]byte, len(cipherText)) + mode.CryptBlocks(decrypted, cipherText) + + log.Printf("tryDecryptWithoutPaddingRemoval: Decrypted length: %d", len(decrypted)) + + // Coba tanpa remove padding dulu + return tryAllDecompressionMethods(decrypted) +} + +func tryDecryptWithCustomIV(cipherText []byte, block cipher.Block, iv []byte) (string, error) { + if len(cipherText)%aes.BlockSize != 0 { + return "", errors.New("cipherText is not a multiple of the block size") + } + + mode := cipher.NewCBCDecrypter(block, iv) + decrypted := make([]byte, len(cipherText)) + mode.CryptBlocks(decrypted, 
cipherText) + + log.Printf("tryDecryptWithCustomIV: Raw decrypted length: %d", len(decrypted)) + log.Printf("tryDecryptWithCustomIV: Raw first 50 bytes: %q", string(decrypted[:min(50, len(decrypted))])) + + // Try multiple padding removal strategies + paddingStrategies := []func([]byte) []byte{ + helper.RemovePKCS7Padding, + removePaddingManual, + func(data []byte) []byte { return data }, // No padding removal + } + + for i, strategy := range paddingStrategies { + processed := strategy(decrypted) + log.Printf("tryDecryptWithCustomIV: Strategy %d, processed length: %d", i+1, len(processed)) + + if result, err := tryAllDecompressionMethods(processed); err == nil { + log.Printf("tryDecryptWithCustomIV: Success with padding strategy %d", i+1) + return result, nil + } + } + + return "", errors.New("all padding strategies failed") +} + +// Manual padding removal yang lebih agresif +func removePaddingManual(data []byte) []byte { + if len(data) == 0 { + return data + } + + // Coba berbagai kemungkinan padding + for padLen := 1; padLen <= min(16, len(data)); padLen++ { + if data[len(data)-1] == byte(padLen) { + // Check if all padding bytes match + valid := true + start := len(data) - padLen + for i := start; i < len(data); i++ { + if data[i] != byte(padLen) { + valid = false + break + } + } + if valid { + log.Printf("removePaddingManual: Found valid padding of length %d", padLen) + return data[:start] + } + } + } + + // Jika tidak ada padding yang valid, coba buang beberapa byte terakhir + for i := 1; i <= min(16, len(data)); i++ { + trimmed := data[:len(data)-i] + if isLikelyValidData(trimmed) { + log.Printf("removePaddingManual: Trimmed %d bytes, result seems valid", i) + return trimmed + } + } + + return data +} + +// Fungsi untuk mengecek apakah data kemungkinan valid +func isLikelyValidData(data []byte) bool { + if len(data) < 10 { + return false + } + + // Check for common patterns in compressed data or JSON + str := string(data) + + // LZ-string patterns + if 
strings.HasPrefix(str, "N4Ig") || strings.HasPrefix(str, "BwIw") { + return true + } + + // JSON patterns + if strings.HasPrefix(str, "{") || strings.HasPrefix(str, "[") { + return true + } + + // Gzip magic number + if len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b { + return true + } + + return false +} +func tryBase64ThenDecompress(data []byte) (string, error) { + decoded, err := base64.StdEncoding.DecodeString(string(data)) + if err != nil { + return "", err + } + + return tryLZStringMethods(decoded) +} + +// Fungsi alternatif untuk menghapus prefix corrupt dari data +func removeCorruptPrefix(data string) string { + // Cari pattern LZ-string yang dikenal + patterns := []string{"EAuUA", "N4Ig", "BwIw", "BAuUA"} + + for _, pattern := range patterns { + if idx := strings.Index(data, pattern); idx >= 0 { + cleaned := data[idx:] + log.Printf("removeCorruptPrefix: Removed %d corrupt bytes, found pattern: %s", idx, pattern) + return cleaned + } + } + + // Jika tidak ada pattern ditemukan, coba hapus karakter non-printable di awal + var result strings.Builder + started := false + + for _, r := range data { + if !started { + // Mulai mengumpulkan karakter setelah bertemu karakter valid + if (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '+' || r == '/' || r == '=' { + started = true + result.WriteRune(r) + } + } else { + result.WriteRune(r) + } + } + + return result.String() +} diff --git a/internal/services/bpjs/vclaimBridge.go b/internal/services/bpjs/vclaimBridge.go new file mode 100644 index 0000000..ba48472 --- /dev/null +++ b/internal/services/bpjs/vclaimBridge.go @@ -0,0 +1,564 @@ +package services + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + "unicode" + + "api-service/internal/config" + "api-service/internal/models/vclaim/peserta" + + "github.com/mashingan/smapping" + "github.com/rs/zerolog/log" +) + +// VClaimService interface for VClaim operations +type 
VClaimService interface { + Get(ctx context.Context, endpoint string, result interface{}) error + Post(ctx context.Context, endpoint string, payload interface{}, result interface{}) error + Put(ctx context.Context, endpoint string, payload interface{}, result interface{}) error + Patch(ctx context.Context, endpoint string, payload interface{}, result interface{}) error + Delete(ctx context.Context, endpoint string, result interface{}) error + GetRawResponse(ctx context.Context, endpoint string) (*ResponDTOVclaim, error) + PostRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) + PutRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) + PatchRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) + DeleteRawResponse(ctx context.Context, endpoint string) (*ResponDTOVclaim, error) +} + +// Service struct for VClaim service +type Service struct { + config config.BpjsConfig + httpClient *http.Client +} + +// Response structures +type ResponMentahDTOVclaim struct { + MetaData struct { + Code string `json:"code"` + Message string `json:"message"` + } `json:"metaData"` + Response string `json:"response"` +} + +type ResponDTOVclaim struct { + MetaData struct { + Code string `json:"code"` + Message string `json:"message"` + } `json:"metaData"` + Response interface{} `json:"response"` +} + +// NewService creates a new VClaim service instance +func NewService(cfg config.BpjsConfig) VClaimService { + log.Info(). + Str("base_url", cfg.BaseURL). + Dur("timeout", cfg.Timeout). 
+ Msg("Creating new VClaim service instance") + + service := &Service{ + config: cfg, + httpClient: &http.Client{ + Timeout: cfg.Timeout, + }, + } + return service +} + +// NewServiceFromConfig creates service from main config +func NewServiceFromConfig(cfg *config.Config) VClaimService { + return NewService(cfg.Bpjs) +} + +// NewServiceFromInterface creates service from interface (for backward compatibility) +func NewServiceFromInterface(cfg interface{}) (VClaimService, error) { + var bpjsConfig config.BpjsConfig + + // Try to map from interface + err := smapping.FillStruct(&bpjsConfig, smapping.MapFields(&cfg)) + if err != nil { + return nil, fmt.Errorf("failed to map config: %w", err) + } + + if bpjsConfig.Timeout == 0 { + bpjsConfig.Timeout = 30 * time.Second + } + + return NewService(bpjsConfig), nil +} + +// SetHTTPClient allows custom http client configuration +func (s *Service) SetHTTPClient(client *http.Client) { + s.httpClient = client +} + +// prepareRequest prepares HTTP request with required headers +func (s *Service) prepareRequest(ctx context.Context, method, endpoint string, body io.Reader) (*http.Request, string, string, string, string, error) { + fullURL := s.config.BaseURL + endpoint + + log.Info(). + Str("method", method). + Str("endpoint", endpoint). + Str("full_url", fullURL). + Msg("Preparing HTTP request") + + req, err := http.NewRequestWithContext(ctx, method, fullURL, body) + if err != nil { + log.Error(). + Err(err). + Str("method", method). + Str("endpoint", endpoint). + Msg("Failed to create HTTP request") + return nil, "", "", "", "", fmt.Errorf("failed to create request: %w", err) + } + + // Set headers using the SetHeader method + consID, secretKey, userKey, tstamp, xSignature := s.config.SetHeader() + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("X-cons-id", consID) + req.Header.Set("X-timestamp", tstamp) + req.Header.Set("X-signature", xSignature) + req.Header.Set("user_key", userKey) + + log.Debug(). 
+ Str("method", method). + Str("endpoint", endpoint). + Str("x_cons_id", consID). + Str("x_timestamp", tstamp). + Str("user_key", userKey). + Msg("Request headers set") + + return req, consID, secretKey, tstamp, xSignature, nil +} + +// processResponse processes response from VClaim API +func (s *Service) processResponse(res *http.Response, consID, secretKey, tstamp string) (*ResponDTOVclaim, error) { + defer res.Body.Close() + + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + if res.StatusCode >= 400 { + return nil, fmt.Errorf("HTTP error: %d - %s", res.StatusCode, string(body)) + } + + // Parse raw response + var respMentah ResponMentahDTOVclaim + if err := json.Unmarshal(body, &respMentah); err != nil { + return nil, fmt.Errorf("failed to unmarshal raw response: %w", err) + } + + // Create final response + finalResp := &ResponDTOVclaim{ + MetaData: respMentah.MetaData, + } + + // Check if response needs decryption + if respMentah.Response == "" { + return finalResp, nil + } + + // Try to parse as JSON first (unencrypted response) + var tempResp interface{} + if json.Unmarshal([]byte(respMentah.Response), &tempResp) == nil { + finalResp.Response = tempResp + return finalResp, nil + } + + // Check if response looks like HTML or error message (don't try to decrypt) + if strings.HasPrefix(respMentah.Response, "<") || strings.Contains(respMentah.Response, "error") { + finalResp.Response = respMentah.Response + return finalResp, nil + } + + // Decrypt response using the same timestamp from the request + decryptionKey := consID + secretKey + tstamp + + log.Debug(). + Str("consID", consID). + Str("tstamp", tstamp). + Int("key_length", len(decryptionKey)). 
+ Msg("Decryption key components") + + respDecrypt, err := ResponseVclaim(respMentah.Response, decryptionKey) + if err != nil { + log.Error().Err(err).Msg("Failed to decrypt response") + return nil, fmt.Errorf("failed to decrypt response: %w", err) + } + + // Try to unmarshal decrypted response as JSON + if respDecrypt != "" { + // Clean the decrypted response + respDecrypt = cleanResponse(respDecrypt) + + // Try multiple cleaning strategies + cleaningStrategies := []string{ + respDecrypt, + strings.TrimLeft(respDecrypt, "\ufeff\xfe\xef\xbb\xbf"), + strings.TrimLeftFunc(respDecrypt, func(r rune) bool { return r < 32 && r != '\n' && r != '\r' && r != '\t' }), + } + + var jsonParseSuccess bool + for i, cleaned := range cleaningStrategies { + if err := json.Unmarshal([]byte(cleaned), &finalResp.Response); err == nil { + log.Info(). + Int("strategy", i+1). + Msg("Successfully parsed JSON with cleaning strategy") + jsonParseSuccess = true + break + } + } + + if !jsonParseSuccess { + // If all JSON parsing fails, store as string + log.Warn().Msg("All JSON parsing strategies failed, storing as string") + finalResp.Response = respDecrypt + } + } + + return finalResp, nil +} + +// Get performs HTTP GET request +func (s *Service) Get(ctx context.Context, endpoint string, result interface{}) error { + resp, err := s.GetRawResponse(ctx, endpoint) + if err != nil { + return err + } + + return mapToResult(resp, result) +} + +// Post performs HTTP POST request +func (s *Service) Post(ctx context.Context, endpoint string, payload interface{}, result interface{}) error { + resp, err := s.PostRawResponse(ctx, endpoint, payload) + if err != nil { + return err + } + + return mapToResult(resp, result) +} + +// Put performs HTTP PUT request +func (s *Service) Put(ctx context.Context, endpoint string, payload interface{}, result interface{}) error { + var buf bytes.Buffer + if payload != nil { + if err := json.NewEncoder(&buf).Encode(payload); err != nil { + return fmt.Errorf("failed to 
encode payload: %w", err) + } + } + + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodPut, endpoint, &buf) + if err != nil { + return err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return fmt.Errorf("failed to execute PUT request: %w", err) + } + + resp, err := s.processResponse(res, consID, secretKey, tstamp) + if err != nil { + return err + } + + return mapToResult(resp, result) +} + +// Delete performs HTTP DELETE request +func (s *Service) Delete(ctx context.Context, endpoint string, result interface{}) error { + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodDelete, endpoint, nil) + if err != nil { + return err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return fmt.Errorf("failed to execute DELETE request: %w", err) + } + + resp, err := s.processResponse(res, consID, secretKey, tstamp) + if err != nil { + return err + } + + return mapToResult(resp, result) +} + +// Patch performs HTTP PATCH request +func (s *Service) Patch(ctx context.Context, endpoint string, payload interface{}, result interface{}) error { + var buf bytes.Buffer + if payload != nil { + if err := json.NewEncoder(&buf).Encode(payload); err != nil { + return fmt.Errorf("failed to encode payload: %w", err) + } + } + + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodPatch, endpoint, &buf) + if err != nil { + return err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return fmt.Errorf("failed to execute PATCH request: %w", err) + } + + resp, err := s.processResponse(res, consID, secretKey, tstamp) + if err != nil { + return err + } + + return mapToResult(resp, result) +} + +// GetRawResponse returns raw response without mapping +func (s *Service) GetRawResponse(ctx context.Context, endpoint string) (*ResponDTOVclaim, error) { + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodGet, endpoint, nil) + if err != nil { + return nil, err + } + + res, 
err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute GET request: %w", err) + } + + return s.processResponse(res, consID, secretKey, tstamp) +} + +// PostRawResponse returns raw response without mapping +func (s *Service) PostRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) { + var buf bytes.Buffer + if payload != nil { + if err := json.NewEncoder(&buf).Encode(payload); err != nil { + return nil, fmt.Errorf("failed to encode payload: %w", err) + } + } + + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodPost, endpoint, &buf) + if err != nil { + return nil, err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute POST request: %w", err) + } + + return s.processResponse(res, consID, secretKey, tstamp) +} + +// PatchRawResponse returns raw response without mapping +func (s *Service) PatchRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) { + var buf bytes.Buffer + if payload != nil { + if err := json.NewEncoder(&buf).Encode(payload); err != nil { + return nil, fmt.Errorf("failed to encode payload: %w", err) + } + } + + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodPatch, endpoint, &buf) + if err != nil { + return nil, err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute PATCH request: %w", err) + } + + return s.processResponse(res, consID, secretKey, tstamp) +} + +// PutRawResponse returns raw response without mapping +func (s *Service) PutRawResponse(ctx context.Context, endpoint string, payload interface{}) (*ResponDTOVclaim, error) { + var buf bytes.Buffer + if payload != nil { + if err := json.NewEncoder(&buf).Encode(payload); err != nil { + return nil, fmt.Errorf("failed to encode payload: %w", err) + } + } + + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, 
http.MethodPut, endpoint, &buf) + if err != nil { + return nil, err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute PUT request: %w", err) + } + + return s.processResponse(res, consID, secretKey, tstamp) +} + +// DeleteRawResponse returns raw response without mapping +func (s *Service) DeleteRawResponse(ctx context.Context, endpoint string) (*ResponDTOVclaim, error) { + req, consID, secretKey, tstamp, _, err := s.prepareRequest(ctx, http.MethodDelete, endpoint, nil) + if err != nil { + return nil, err + } + + res, err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute DELETE request: %w", err) + } + + return s.processResponse(res, consID, secretKey, tstamp) +} + +// mapToResult maps the final response to the result interface +func mapToResult(resp *ResponDTOVclaim, result interface{}) error { + respBytes, err := json.Marshal(resp) + if err != nil { + return fmt.Errorf("failed to marshal final response: %w", err) + } + + if err := json.Unmarshal(respBytes, result); err != nil { + return fmt.Errorf("failed to unmarshal to result: %w", err) + } + + // Handle BPJS peserta response structure + if pesertaResp, ok := result.(*peserta.PesertaResponse); ok { + if resp.Response != nil { + if responseMap, ok := resp.Response.(map[string]interface{}); ok { + if pesertaMap, ok := responseMap["peserta"]; ok { + pesertaBytes, _ := json.Marshal(pesertaMap) + var pd peserta.PesertaData + json.Unmarshal(pesertaBytes, &pd) + pesertaResp.Data = &pd + } + } + } + } + + return nil +} + +// Backward compatibility functions +func GetRequest(endpoint string, cfg interface{}) interface{} { + service, err := NewServiceFromInterface(cfg) + if err != nil { + fmt.Printf("Failed to create service: %v\n", err) + return nil + } + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + resp, err := service.GetRawResponse(ctx, endpoint) + if err != nil { + 
// findMatchingBrace returns the byte index of the closing brace that matches
// the opening brace at position 0 of s, honoring JSON string literals and
// backslash escapes. It returns -1 when s does not start with '{' or when
// the brace is never closed.
func findMatchingBrace(s string) int {
	if !strings.HasPrefix(s, "{") {
		return -1
	}

	depth := 0
	inString := false
	escaped := false

	for i, r := range s {
		switch {
		case escaped:
			// Previous rune was a backslash: this rune is consumed literally.
			escaped = false
		case r == '\\':
			escaped = true
		case r == '"':
			inString = !inString
		case inString:
			// Braces inside string literals do not affect nesting.
		case r == '{':
			depth++
		case r == '}':
			depth--
			if depth == 0 {
				return i
			}
		}
	}

	return -1
}
`json:"fields,omitempty"` + Filters []FilterGroup `json:"filters,omitempty"` + Sort []SortField `json:"sort,omitempty"` + Limit int `json:"limit"` + Offset int `json:"offset"` + GroupBy []string `json:"group_by,omitempty"` + Having []FilterGroup `json:"having,omitempty"` +} + +// SortField represents sorting configuration +type SortField struct { + Column string `json:"column"` + Order string `json:"order"` // ASC, DESC +} + +// QueryBuilder builds SQL queries from dynamic filters +type QueryBuilder struct { + tableName string + columnMapping map[string]string // Maps API field names to DB column names + allowedColumns map[string]bool // Security: only allow specified columns + paramCounter int + mu *sync.RWMutex +} + +// NewQueryBuilder creates a new query builder instance +func NewQueryBuilder(tableName string) *QueryBuilder { + return &QueryBuilder{ + tableName: tableName, + columnMapping: make(map[string]string), + allowedColumns: make(map[string]bool), + paramCounter: 0, + } +} + +// SetColumnMapping sets the mapping between API field names and database column names +func (qb *QueryBuilder) SetColumnMapping(mapping map[string]string) *QueryBuilder { + qb.columnMapping = mapping + return qb +} + +// SetAllowedColumns sets the list of allowed columns for security +func (qb *QueryBuilder) SetAllowedColumns(columns []string) *QueryBuilder { + qb.allowedColumns = make(map[string]bool) + for _, col := range columns { + qb.allowedColumns[col] = true + } + return qb +} + +// BuildQuery builds the complete SQL query +func (qb *QueryBuilder) BuildQuery(query DynamicQuery) (string, []interface{}, error) { + qb.paramCounter = 0 + + // Build SELECT clause + selectClause := qb.buildSelectClause(query.Fields) + + // Build FROM clause + fromClause := fmt.Sprintf("FROM %s", qb.tableName) + + // Build WHERE clause + whereClause, whereArgs, err := qb.buildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + + // Build ORDER BY clause + orderClause := 
qb.buildOrderClause(query.Sort) + + // Build GROUP BY clause + groupClause := qb.buildGroupByClause(query.GroupBy) + + // Build HAVING clause + havingClause, havingArgs, err := qb.buildHavingClause(query.Having) + if err != nil { + return "", nil, err + } + + // Combine all parts + sqlParts := []string{selectClause, fromClause} + args := []interface{}{} + + if whereClause != "" { + sqlParts = append(sqlParts, "WHERE "+whereClause) + args = append(args, whereArgs...) + } + + if groupClause != "" { + sqlParts = append(sqlParts, groupClause) + } + + if havingClause != "" { + sqlParts = append(sqlParts, "HAVING "+havingClause) + args = append(args, havingArgs...) + } + + if orderClause != "" { + sqlParts = append(sqlParts, orderClause) + } + + // Add pagination + if query.Limit > 0 { + qb.paramCounter++ + sqlParts = append(sqlParts, fmt.Sprintf("LIMIT $%d", qb.paramCounter)) + args = append(args, query.Limit) + } + + if query.Offset > 0 { + qb.paramCounter++ + sqlParts = append(sqlParts, fmt.Sprintf("OFFSET $%d", qb.paramCounter)) + args = append(args, query.Offset) + } + + sql := strings.Join(sqlParts, " ") + return sql, args, nil +} + +// buildSelectClause builds the SELECT part of the query +func (qb *QueryBuilder) buildSelectClause(fields []string) string { + if len(fields) == 0 || (len(fields) == 1 && fields[0] == "*") { + return "SELECT *" + } + + var selectedFields []string + for _, field := range fields { + if field == "*.*" || field == "*" { + selectedFields = append(selectedFields, "*") + continue + } + + // Check if it's an expression (contains spaces, parentheses, etc.) 
+ if strings.Contains(field, " ") || strings.Contains(field, "(") || strings.Contains(field, ")") { + // Expression, add as is + selectedFields = append(selectedFields, field) + continue + } + + // Security check: only allow specified columns (check original field name) + if len(qb.allowedColumns) > 0 && !qb.allowedColumns[field] { + continue + } + + // Map field name if mapping exists + if mappedCol, exists := qb.columnMapping[field]; exists { + field = mappedCol + } + + selectedFields = append(selectedFields, fmt.Sprintf(`"%s"`, field)) + } + + if len(selectedFields) == 0 { + return "SELECT *" + } + + return "SELECT " + strings.Join(selectedFields, ", ") +} + +// buildWhereClause builds the WHERE part of the query +func (qb *QueryBuilder) buildWhereClause(filterGroups []FilterGroup) (string, []interface{}, error) { + if len(filterGroups) == 0 { + return "", nil, nil + } + + var conditions []string + var args []interface{} + + for i, group := range filterGroups { + groupCondition, groupArgs, err := qb.buildFilterGroup(group) + if err != nil { + return "", nil, err + } + + if groupCondition != "" { + if i > 0 { + logicOp := "AND" + if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + conditions = append(conditions, logicOp) + } + + conditions = append(conditions, groupCondition) + args = append(args, groupArgs...) 
+ } + } + + return strings.Join(conditions, " "), args, nil +} + +// buildFilterGroup builds conditions for a filter group +func (qb *QueryBuilder) buildFilterGroup(group FilterGroup) (string, []interface{}, error) { + if len(group.Filters) == 0 { + return "", nil, nil + } + + var conditions []string + var args []interface{} + + for i, filter := range group.Filters { + condition, filterArgs, err := qb.buildFilterCondition(filter) + if err != nil { + return "", nil, err + } + + if condition != "" { + if i > 0 { + logicOp := "AND" + if filter.LogicOp != "" { + logicOp = strings.ToUpper(filter.LogicOp) + } else if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + conditions = append(conditions, logicOp) + } + + conditions = append(conditions, condition) + args = append(args, filterArgs...) + } + } + + return strings.Join(conditions, " "), args, nil +} + +// buildFilterCondition builds a single filter condition +func (qb *QueryBuilder) buildFilterCondition(filter DynamicFilter) (string, []interface{}, error) { + // Security check (check original field name) + if len(qb.allowedColumns) > 0 && !qb.allowedColumns[filter.Column] { + return "", nil, nil + } + + // Map column name if mapping exists + column := filter.Column + if mappedCol, exists := qb.columnMapping[column]; exists { + column = mappedCol + } + + // Wrap column name in quotes for PostgreSQL + column = fmt.Sprintf(`"%s"`, column) + + switch filter.Operator { + case OpEqual: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s = $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpNotEqual: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s != $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpLike: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s LIKE $%d", column, qb.paramCounter), 
[]interface{}{filter.Value}, nil + + case OpILike: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s ILIKE $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpIn: + values := qb.parseArrayValue(filter.Value) + if len(values) == 0 { + return "", nil, nil + } + + var placeholders []string + var args []interface{} + for _, val := range values { + qb.paramCounter++ + placeholders = append(placeholders, fmt.Sprintf("$%d", qb.paramCounter)) + args = append(args, val) + } + + return fmt.Sprintf("%s IN (%s)", column, strings.Join(placeholders, ", ")), args, nil + + case OpNotIn: + values := qb.parseArrayValue(filter.Value) + if len(values) == 0 { + return "", nil, nil + } + + var placeholders []string + var args []interface{} + for _, val := range values { + qb.paramCounter++ + placeholders = append(placeholders, fmt.Sprintf("$%d", qb.paramCounter)) + args = append(args, val) + } + + return fmt.Sprintf("%s NOT IN (%s)", column, strings.Join(placeholders, ", ")), args, nil + + case OpGreaterThan: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s > $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpGreaterThanEqual: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s >= $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpLessThan: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s < $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpLessThanEqual: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + return fmt.Sprintf("%s <= $%d", column, qb.paramCounter), []interface{}{filter.Value}, nil + + case OpBetween: + if filter.Value == nil { + return "", nil, nil + } + values := qb.parseArrayValue(filter.Value) + if len(values) != 2 { + return "", nil, fmt.Errorf("between 
operator requires exactly 2 values") + } + qb.paramCounter++ + param1 := qb.paramCounter + qb.paramCounter++ + param2 := qb.paramCounter + return fmt.Sprintf("%s BETWEEN $%d AND $%d", column, param1, param2), []interface{}{values[0], values[1]}, nil + + case OpNotBetween: + if filter.Value == nil { + return "", nil, nil + } + values := qb.parseArrayValue(filter.Value) + if len(values) != 2 { + return "", nil, fmt.Errorf("not between operator requires exactly 2 values") + } + qb.paramCounter++ + param1 := qb.paramCounter + qb.paramCounter++ + param2 := qb.paramCounter + return fmt.Sprintf("%s NOT BETWEEN $%d AND $%d", column, param1, param2), []interface{}{values[0], values[1]}, nil + + case OpNull: + return fmt.Sprintf("%s IS NULL", column), nil, nil + + case OpNotNull: + return fmt.Sprintf("%s IS NOT NULL", column), nil, nil + + case OpContains: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + value := fmt.Sprintf("%%%v%%", filter.Value) + return fmt.Sprintf("%s ILIKE $%d", column, qb.paramCounter), []interface{}{value}, nil + + case OpNotContains: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + value := fmt.Sprintf("%%%v%%", filter.Value) + return fmt.Sprintf("%s NOT ILIKE $%d", column, qb.paramCounter), []interface{}{value}, nil + + case OpStartsWith: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + value := fmt.Sprintf("%v%%", filter.Value) + return fmt.Sprintf("%s ILIKE $%d", column, qb.paramCounter), []interface{}{value}, nil + + case OpEndsWith: + if filter.Value == nil { + return "", nil, nil + } + qb.paramCounter++ + value := fmt.Sprintf("%%%v", filter.Value) + return fmt.Sprintf("%s ILIKE $%d", column, qb.paramCounter), []interface{}{value}, nil + + default: + return "", nil, fmt.Errorf("unsupported operator: %s", filter.Operator) + } +} + +// parseArrayValue parses array values from various formats +func (qb *QueryBuilder) parseArrayValue(value interface{}) []interface{} { + 
if value == nil { + return nil + } + + // If it's already a slice + if reflect.TypeOf(value).Kind() == reflect.Slice { + v := reflect.ValueOf(value) + result := make([]interface{}, v.Len()) + for i := 0; i < v.Len(); i++ { + result[i] = v.Index(i).Interface() + } + return result + } + + // If it's a string, try to split by comma + if str, ok := value.(string); ok { + if strings.Contains(str, ",") { + parts := strings.Split(str, ",") + result := make([]interface{}, len(parts)) + for i, part := range parts { + result[i] = strings.TrimSpace(part) + } + return result + } + return []interface{}{str} + } + + return []interface{}{value} +} + +// buildOrderClause builds the ORDER BY clause +func (qb *QueryBuilder) buildOrderClause(sortFields []SortField) string { + if len(sortFields) == 0 { + return "" + } + + var orderParts []string + for _, sort := range sortFields { + column := sort.Column + + // Security check (check original field name) + if len(qb.allowedColumns) > 0 && !qb.allowedColumns[column] { + continue + } + + if mappedCol, exists := qb.columnMapping[column]; exists { + column = mappedCol + } + + order := "ASC" + if sort.Order != "" { + order = strings.ToUpper(sort.Order) + } + + orderParts = append(orderParts, fmt.Sprintf(`"%s" %s`, column, order)) + } + + if len(orderParts) == 0 { + return "" + } + + return "ORDER BY " + strings.Join(orderParts, ", ") +} + +// buildGroupByClause builds the GROUP BY clause +func (qb *QueryBuilder) buildGroupByClause(groupFields []string) string { + if len(groupFields) == 0 { + return "" + } + + var groupParts []string + for _, field := range groupFields { + column := field + if mappedCol, exists := qb.columnMapping[column]; exists { + column = mappedCol + } + + // Security check + if len(qb.allowedColumns) > 0 && !qb.allowedColumns[column] { + continue + } + + groupParts = append(groupParts, fmt.Sprintf(`"%s"`, column)) + } + + if len(groupParts) == 0 { + return "" + } + + return "GROUP BY " + strings.Join(groupParts, ", ") 
+} + +// buildHavingClause builds the HAVING clause +func (qb *QueryBuilder) buildHavingClause(havingGroups []FilterGroup) (string, []interface{}, error) { + if len(havingGroups) == 0 { + return "", nil, nil + } + + return qb.buildWhereClause(havingGroups) +} + +// BuildCountQuery builds a count query +func (qb *QueryBuilder) BuildCountQuery(query DynamicQuery) (string, []interface{}, error) { + qb.paramCounter = 0 + + // Build FROM clause + fromClause := fmt.Sprintf("FROM %s", qb.tableName) + + // Build WHERE clause + whereClause, whereArgs, err := qb.buildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + + // Build GROUP BY clause + groupClause := qb.buildGroupByClause(query.GroupBy) + + // Build HAVING clause + havingClause, havingArgs, err := qb.buildHavingClause(query.Having) + if err != nil { + return "", nil, err + } + + // Combine parts + sqlParts := []string{"SELECT COUNT(*)", fromClause} + args := []interface{}{} + + if whereClause != "" { + sqlParts = append(sqlParts, "WHERE "+whereClause) + args = append(args, whereArgs...) + } + + if groupClause != "" { + sqlParts = append(sqlParts, groupClause) + } + + if havingClause != "" { + sqlParts = append(sqlParts, "HAVING "+havingClause) + args = append(args, havingArgs...) 
+ } + + sql := strings.Join(sqlParts, " ") + return sql, args, nil +} diff --git a/internal/utils/filters/query_parser.go b/internal/utils/filters/query_parser.go new file mode 100644 index 0000000..6b6f07e --- /dev/null +++ b/internal/utils/filters/query_parser.go @@ -0,0 +1,241 @@ +package utils + +import ( + "net/url" + "strconv" + "strings" + "time" +) + +// QueryParser parses HTTP query parameters into DynamicQuery +type QueryParser struct { + defaultLimit int + maxLimit int +} + +// NewQueryParser creates a new query parser +func NewQueryParser() *QueryParser { + return &QueryParser{ + defaultLimit: 10, + maxLimit: 100, + } +} + +// SetLimits sets default and maximum limits +func (qp *QueryParser) SetLimits(defaultLimit, maxLimit int) *QueryParser { + qp.defaultLimit = defaultLimit + qp.maxLimit = maxLimit + return qp +} + +// ParseQuery parses URL query parameters into DynamicQuery +func (qp *QueryParser) ParseQuery(values url.Values) (DynamicQuery, error) { + query := DynamicQuery{ + Limit: qp.defaultLimit, + Offset: 0, + } + + // Parse fields + if fields := values.Get("fields"); fields != "" { + if fields == "*.*" || fields == "*" { + query.Fields = []string{"*"} + } else { + query.Fields = strings.Split(fields, ",") + for i, field := range query.Fields { + query.Fields[i] = strings.TrimSpace(field) + } + } + } + + // Parse pagination + if limit := values.Get("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil { + if l > 0 && l <= qp.maxLimit { + query.Limit = l + } + } + } + + if offset := values.Get("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Parse filters + filters, err := qp.parseFilters(values) + if err != nil { + return query, err + } + query.Filters = filters + + // Parse sorting + sorts, err := qp.parseSorting(values) + if err != nil { + return query, err + } + query.Sort = sorts + + // Parse group by + if groupBy := values.Get("group"); groupBy != "" { + 
query.GroupBy = strings.Split(groupBy, ",")
		for i, field := range query.GroupBy {
			query.GroupBy[i] = strings.TrimSpace(field)
		}
	}

	return query, nil
}

// parseFilters parses filter parameters
// Supports format: filter[column][operator]=value
// e.g. ?filter[age][gte]=18 yields {Column: "age", Operator: "gte", Value: "18"}.
// All parsed filters are combined into a single AND group.
func (qp *QueryParser) parseFilters(values url.Values) ([]FilterGroup, error) {
	filterMap := make(map[string]map[string]string)

	// Group filters by column
	for key, vals := range values {
		if strings.HasPrefix(key, "filter[") && strings.HasSuffix(key, "]") {
			// Parse filter[column][operator] format
			// key[7:len(key)-1] strips the leading "filter[" and the trailing "]".
			parts := strings.Split(key[7:len(key)-1], "][")
			if len(parts) == 2 {
				column := parts[0]
				operator := parts[1]

				if filterMap[column] == nil {
					filterMap[column] = make(map[string]string)
				}

				// Only the first value of a repeated query parameter is used.
				if len(vals) > 0 {
					filterMap[column][operator] = vals[0]
				}
			}
		}
	}

	if len(filterMap) == 0 {
		return nil, nil
	}

	// Convert to FilterGroup
	var filters []DynamicFilter

	for column, operators := range filterMap {
		for opStr, value := range operators {
			operator := FilterOperator(opStr)

			// Parse value based on operator
			var parsedValue interface{}
			switch operator {
			case OpIn, OpNotIn:
				// Comma-separated list, e.g. filter[id][in]=1,2,3.
				if value != "" {
					parsedValue = strings.Split(value, ",")
				}
			case OpBetween, OpNotBetween:
				// Expects exactly two comma-separated bounds.
				// NOTE(review): with a malformed value (empty, or not
				// exactly two parts) parsedValue stays nil but the filter
				// is still appended below — confirm the SQL builder
				// tolerates a nil value for these operators.
				if value != "" {
					parts := strings.Split(value, ",")
					if len(parts) == 2 {
						parsedValue = []interface{}{strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])}
					}
				}
			case OpNull, OpNotNull:
				// NULL checks carry no operand.
				parsedValue = nil
			default:
				parsedValue = value
			}

			filters = append(filters, DynamicFilter{
				Column:   column,
				Operator: operator,
				Value:    parsedValue,
			})
		}
	}

	if len(filters) == 0 {
		return nil, nil
	}

	// Wrap everything in one AND-joined group.
	return []FilterGroup{{
		Filters: filters,
		LogicOp: "AND",
	}}, nil
}

// parseSorting parses sort parameters
// Supports format: sort=column1,-column2 (- for DESC)
func (qp *QueryParser) parseSorting(values url.Values) ([]SortField, error) {
	sortParam :=
values.Get("sort") + if sortParam == "" { + return nil, nil + } + + var sorts []SortField + fields := strings.Split(sortParam, ",") + + for _, field := range fields { + field = strings.TrimSpace(field) + if field == "" { + continue + } + + order := "ASC" + column := field + + if strings.HasPrefix(field, "-") { + order = "DESC" + column = field[1:] + } else if strings.HasPrefix(field, "+") { + column = field[1:] + } + + sorts = append(sorts, SortField{ + Column: column, + Order: order, + }) + } + + return sorts, nil +} + +// ParseAdvancedFilters parses complex filter structures +// Supports nested filters and logic operators +func (qp *QueryParser) ParseAdvancedFilters(filterParam string) ([]FilterGroup, error) { + // This would be for more complex JSON-based filters + // Implementation depends on your specific needs + return nil, nil +} + +// Helper function to parse date values +func parseDate(value string) (interface{}, error) { + // Try different date formats + formats := []string{ + "2006-01-02", + "2006-01-02T15:04:05Z", + "2006-01-02T15:04:05.000Z", + "2006-01-02 15:04:05", + } + + for _, format := range formats { + if t, err := time.Parse(format, value); err == nil { + return t, nil + } + } + + return value, nil +} + +// Helper function to parse numeric values +func parseNumeric(value string) interface{} { + // Try integer first + if i, err := strconv.Atoi(value); err == nil { + return i + } + + // Try float + if f, err := strconv.ParseFloat(value, 64); err == nil { + return f + } + + // Return as string + return value +} diff --git a/internal/utils/validation/duplicate_validator.go b/internal/utils/validation/duplicate_validator.go new file mode 100644 index 0000000..863c058 --- /dev/null +++ b/internal/utils/validation/duplicate_validator.go @@ -0,0 +1,141 @@ +package validation + +import ( + "context" + "database/sql" + "fmt" + "time" +) + +// ValidationConfig holds configuration for duplicate validation +type ValidationConfig struct { + TableName string 
+ IDColumn string + StatusColumn string + DateColumn string + ActiveStatuses []string + AdditionalFields map[string]interface{} +} + +// DuplicateValidator provides methods for validating duplicate entries +type DuplicateValidator struct { + db *sql.DB +} + +// NewDuplicateValidator creates a new instance of DuplicateValidator +func NewDuplicateValidator(db *sql.DB) *DuplicateValidator { + return &DuplicateValidator{db: db} +} + +// ValidateDuplicate checks for duplicate entries based on the provided configuration +func (dv *DuplicateValidator) ValidateDuplicate(ctx context.Context, config ValidationConfig, identifier interface{}) error { + query := fmt.Sprintf(` + SELECT COUNT(*) + FROM %s + WHERE %s = $1 + AND %s = ANY($2) + AND DATE(%s) = CURRENT_DATE + `, config.TableName, config.IDColumn, config.StatusColumn, config.DateColumn) + + var count int + err := dv.db.QueryRowContext(ctx, query, identifier, config.ActiveStatuses).Scan(&count) + if err != nil { + return fmt.Errorf("failed to check duplicate: %w", err) + } + + if count > 0 { + return fmt.Errorf("data with ID %v already exists with active status today", identifier) + } + + return nil +} + +// ValidateDuplicateWithCustomFields checks for duplicates with additional custom fields +func (dv *DuplicateValidator) ValidateDuplicateWithCustomFields(ctx context.Context, config ValidationConfig, fields map[string]interface{}) error { + whereClause := fmt.Sprintf("%s = ANY($1) AND DATE(%s) = CURRENT_DATE", config.StatusColumn, config.DateColumn) + args := []interface{}{config.ActiveStatuses} + argIndex := 2 + + // Add additional field conditions + for fieldName, fieldValue := range config.AdditionalFields { + whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex) + args = append(args, fieldValue) + argIndex++ + } + + // Add dynamic fields + for fieldName, fieldValue := range fields { + whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex) + args = append(args, fieldValue) + argIndex++ + } + + 
query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", config.TableName, whereClause) + + var count int + err := dv.db.QueryRowContext(ctx, query, args...).Scan(&count) + if err != nil { + return fmt.Errorf("failed to check duplicate with custom fields: %w", err) + } + + if count > 0 { + return fmt.Errorf("duplicate entry found with the specified criteria") + } + + return nil +} + +// ValidateOncePerDay ensures only one submission per day for a given identifier +func (dv *DuplicateValidator) ValidateOncePerDay(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) error { + query := fmt.Sprintf(` + SELECT COUNT(*) + FROM %s + WHERE %s = $1 + AND DATE(%s) = CURRENT_DATE + `, tableName, idColumn, dateColumn) + + var count int + err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&count) + if err != nil { + return fmt.Errorf("failed to check daily submission: %w", err) + } + + if count > 0 { + return fmt.Errorf("only one submission allowed per day for ID %v", identifier) + } + + return nil +} + +// GetLastSubmissionTime returns the last submission time for a given identifier +func (dv *DuplicateValidator) GetLastSubmissionTime(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) (*time.Time, error) { + query := fmt.Sprintf(` + SELECT %s + FROM %s + WHERE %s = $1 + ORDER BY %s DESC + LIMIT 1 + `, dateColumn, tableName, idColumn, dateColumn) + + var lastTime time.Time + err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&lastTime) + if err != nil { + if err == sql.ErrNoRows { + return nil, nil // No previous submission + } + return nil, fmt.Errorf("failed to get last submission time: %w", err) + } + + return &lastTime, nil +} + +// DefaultRetribusiConfig returns default configuration for retribusi validation +func DefaultRetribusiConfig() ValidationConfig { + return ValidationConfig{ + TableName: "data_retribusi", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + 
ActiveStatuses: []string{"active", "draft"}, + } +} diff --git a/pkg/logger/README.md b/pkg/logger/README.md new file mode 100644 index 0000000..918edda --- /dev/null +++ b/pkg/logger/README.md @@ -0,0 +1,356 @@ +# Structured Logger Package + +A comprehensive structured logging package for Go applications with support for different log levels, service-specific logging, request context, and JSON output formatting. + +## Features + +- **Structured Logging**: JSON and text format output with rich metadata +- **Multiple Log Levels**: DEBUG, INFO, WARN, ERROR, FATAL +- **Service-Specific Logging**: Dedicated loggers for different services +- **Request Context**: Request ID and correlation ID tracking +- **Performance Timing**: Built-in duration logging for operations +- **Gin Middleware**: Request logging middleware for HTTP requests +- **Environment Configuration**: Configurable via environment variables + +## Installation + +The logger is already integrated into the project. Import it using: + +```go +import "api-service/pkg/logger" +``` + +## Quick Start + +### Basic Usage + +```go +// Global functions (use default logger) +logger.Info("Application starting") +logger.Error("Something went wrong", map[string]interface{}{ + "error": err.Error(), + "code": "DB_CONNECTION_FAILED", +}) + +// Create a service-specific logger +authLogger := logger.ServiceLogger("auth-service") +authLogger.Info("User authenticated", map[string]interface{}{ + "user_id": "123", + "method": "oauth2", +}) +``` + +### Service-Specific Loggers + +```go +// Pre-defined service loggers +authLogger := logger.AuthServiceLogger() +bpjsLogger := logger.BPJSServiceLogger() +retribusiLogger := logger.RetribusiServiceLogger() +databaseLogger := logger.DatabaseServiceLogger() + +authLogger.Info("Authentication successful") +databaseLogger.Debug("Query executed", map[string]interface{}{ + "query": "SELECT * FROM users", + "time": "150ms", +}) +``` + +### Request Context Logging + +```go +// Add request 
context to logs +requestLogger := logger.Default(). + WithRequestID("req-123456"). + WithCorrelationID("corr-789012"). + WithField("user_id", "user-123") + +requestLogger.Info("Request processing started", map[string]interface{}{ + "endpoint": "/api/v1/data", + "method": "POST", +}) +``` + +### Performance Timing + +```go +// Time operations and log duration +start := time.Now() +// ... perform operation ... +logger.LogDuration(start, "Database query completed", map[string]interface{}{ + "query": "SELECT * FROM large_table", + "rows": 1000, + "database": "postgres", +}) +``` + +## Gin Middleware Integration + +### Add Request Logger Middleware + +In your routes setup: + +```go +import "api-service/pkg/logger" + +func RegisterRoutes(cfg *config.Config) *gin.Engine { + router := gin.New() + + // Add request logging middleware + router.Use(logger.RequestLoggerMiddleware(logger.Default())) + + // ... other middleware and routes + return router +} +``` + +### Access Logger in Handlers + +```go +func (h *MyHandler) MyEndpoint(c *gin.Context) { + // Get logger from context + logger := logger.GetLoggerFromContext(c) + + logger.Info("Endpoint called", map[string]interface{}{ + "user_agent": c.Request.UserAgent(), + "client_ip": c.ClientIP(), + }) + + // Get request IDs + requestID := logger.GetRequestIDFromContext(c) + correlationID := logger.GetCorrelationIDFromContext(c) +} +``` + +## Configuration + +### Environment Variables + +Set these environment variables to configure the logger: + +```bash +# Log level (DEBUG, INFO, WARN, ERROR, FATAL) +LOG_LEVEL=INFO + +# Output format (text or json) +LOG_FORMAT=text + +# Service name for logs +LOG_SERVICE=api-service + +# Enable JSON format +LOG_JSON=false +``` + +### Programmatic Configuration + +```go +// Create custom logger with specific configuration +cfg := logger.Config{ + Level: "DEBUG", + JSONFormat: true, + Service: "my-custom-service", +} + +customLogger := logger.NewFromConfig(cfg) + +// Or create manually +logger := 
logger.New("service-name", logger.DEBUG, true) +``` + +## Log Levels + +| Level | Description | Usage | +|-------|-------------|-------| +| DEBUG | Detailed debug information | Development and troubleshooting | +| INFO | General operational messages | Normal application behavior | +| WARN | Warning conditions | Something unexpected but not an error | +| ERROR | Error conditions | Operation failed but application continues | +| FATAL | Critical conditions | Application cannot continue | + +## Output Formats + +### Text Format (Default) +``` +2025-08-22T04:33:12+07:00 [INFO] auth-service: User authentication successful (handler/auth.go:45) [user_id=12345 method=oauth2] +``` + +### JSON Format +```json +{ + "timestamp": "2025-08-22T04:33:12+07:00", + "level": "INFO", + "service": "auth-service", + "message": "User authentication successful", + "file": "handler/auth.go", + "line": 45, + "request_id": "req-123456", + "correlation_id": "corr-789012", + "fields": { + "user_id": "12345", + "method": "oauth2" + } +} +``` + +## Best Practices + +### 1. Use Appropriate Log Levels +```go +// Good +logger.Debug("Detailed debug info") +logger.Info("User action completed") +logger.Warn("Rate limit approaching") +logger.Error("Database connection failed") + +// Avoid +logger.Info("Error connecting to database") // Use ERROR instead +``` + +### 2. Add Context to Logs +```go +// Instead of this: +logger.Error("Login failed") + +// Do this: +logger.Error("Login failed", map[string]interface{}{ + "username": username, + "reason": "invalid_credentials", + "attempts": loginAttempts, + "client_ip": clientIP, +}) +``` + +### 3. Use Service-Specific Loggers +```go +// Create once per service +var authLogger = logger.AuthServiceLogger() + +func LoginHandler(c *gin.Context) { + authLogger.Info("Login attempt", map[string]interface{}{ + "username": c.PostForm("username"), + }) +} +``` + +### 4. 
Measure Performance +```go +func ProcessData(data []byte) error { + start := time.Now() + defer func() { + logger.LogDuration(start, "Data processing completed", map[string]interface{}{ + "data_size": len(data), + "items": countItems(data), + }) + }() + + // ... processing logic ... +} +``` + +## Migration from Standard Log Package + +### Before (standard log) +```go +import "log" + +log.Printf("Error: %v", err) +log.Printf("User %s logged in", username) +``` + +### After (structured logger) +```go +import "api-service/pkg/logger" + +logger.Error("Operation failed", map[string]interface{}{ + "error": err.Error(), + "context": "user_login", +}) + +logger.Info("User logged in", map[string]interface{}{ + "username": username, + "method": "password", +}) +``` + +## Examples + +### Database Operations +```go +func (h *UserHandler) GetUser(c *gin.Context) { + logger := logger.GetLoggerFromContext(c) + start := time.Now() + + user, err := h.db.GetUser(c.Param("id")) + if err != nil { + logger.Error("Failed to get user", map[string]interface{}{ + "user_id": c.Param("id"), + "error": err.Error(), + }) + c.JSON(500, gin.H{"error": "Internal server error"}) + return + } + + logger.LogDuration(start, "User retrieved successfully", map[string]interface{}{ + "user_id": user.ID, + "query_time": time.Since(start).String(), + }) + + c.JSON(200, user) +} +``` + +### Authentication Service +```go +var authLogger = logger.AuthServiceLogger() + +func Authenticate(username, password string) (bool, error) { + authLogger.Debug("Authentication attempt", map[string]interface{}{ + "username": username, + }) + + // Authentication logic... 
+ + if authenticated { + authLogger.Info("Authentication successful", map[string]interface{}{ + "username": username, + "method": "password", + }) + return true, nil + } + + authLogger.Warn("Authentication failed", map[string]interface{}{ + "username": username, + "reason": "invalid_credentials", + }) + return false, nil +} +``` + +## Troubleshooting + +### Common Issues + +1. **No logs appearing**: Check that log level is not set too high (e.g., ERROR when logging INFO) +2. **JSON format not working**: Ensure `LOG_JSON=true` or logger is created with `jsonFormat: true` +3. **Missing context**: Use `WithRequestID()` and `WithCorrelationID()` for request context + +### Debug Mode + +Enable debug logging for development: + +```bash +export LOG_LEVEL=DEBUG +export LOG_FORMAT=text +``` + +## Performance Considerations + +- Logger is designed to be lightweight and fast +- Context fields are only evaluated when the log level is enabled +- JSON marshaling only occurs when JSON format is enabled +- Consider log volume in production environments + +## License + +This logger package is part of the API Service project. 
diff --git a/pkg/logger/config.go b/pkg/logger/config.go new file mode 100644 index 0000000..68f69d1 --- /dev/null +++ b/pkg/logger/config.go @@ -0,0 +1,137 @@ +package logger + +import ( + "os" + "strconv" + "strings" +) + +// Config holds the configuration for the logger +type Config struct { + Level string `json:"level" default:"INFO"` + JSONFormat bool `json:"json_format" default:"false"` + Service string `json:"service" default:"api-service"` +} + +// DefaultConfig returns the default logger configuration +func DefaultConfig() Config { + return Config{ + Level: "INFO", + JSONFormat: false, + Service: "api-service", + } +} + +// LoadConfigFromEnv loads logger configuration from environment variables +func LoadConfigFromEnv() Config { + config := DefaultConfig() + + // Load log level from environment + if level := os.Getenv("LOG_LEVEL"); level != "" { + config.Level = strings.ToUpper(level) + } + + // Load JSON format from environment + if jsonFormat := os.Getenv("LOG_JSON_FORMAT"); jsonFormat != "" { + if parsed, err := strconv.ParseBool(jsonFormat); err == nil { + config.JSONFormat = parsed + } + } + + // Load service name from environment + if service := os.Getenv("LOG_SERVICE_NAME"); service != "" { + config.Service = service + } + + return config +} + +// Validate validates the logger configuration +func (c *Config) Validate() error { + // Validate log level + validLevels := map[string]bool{ + "DEBUG": true, + "INFO": true, + "WARN": true, + "ERROR": true, + "FATAL": true, + } + + if !validLevels[c.Level] { + c.Level = "INFO" // Default to INFO if invalid + } + + return nil +} + +// GetLogLevel returns the LogLevel from the configuration +func (c *Config) GetLogLevel() LogLevel { + switch strings.ToUpper(c.Level) { + case "DEBUG": + return DEBUG + case "WARN": + return WARN + case "ERROR": + return ERROR + case "FATAL": + return FATAL + default: + return INFO + } +} + +// CreateLoggerFromConfig creates a new logger instance from configuration +func 
CreateLoggerFromConfig(cfg Config) *Logger { + cfg.Validate() + return NewFromConfig(cfg) +} + +// CreateLoggerFromEnv creates a new logger instance from environment variables +func CreateLoggerFromEnv() *Logger { + cfg := LoadConfigFromEnv() + return CreateLoggerFromConfig(cfg) +} + +// Environment variable constants +const ( + EnvLogLevel = "LOG_LEVEL" + EnvLogJSONFormat = "LOG_JSON_FORMAT" + EnvLogService = "LOG_SERVICE_NAME" +) + +// Service-specific configuration helpers + +// AuthServiceConfig returns configuration for auth service +func AuthServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "auth-service" + return cfg +} + +// BPJSServiceConfig returns configuration for BPJS service +func BPJSServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "bpjs-service" + return cfg +} + +// RetribusiServiceConfig returns configuration for retribusi service +func RetribusiServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "retribusi-service" + return cfg +} + +// DatabaseServiceConfig returns configuration for database service +func DatabaseServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "database-service" + return cfg +} + +// MiddlewareServiceConfig returns configuration for middleware service +func MiddlewareServiceConfig() Config { + cfg := LoadConfigFromEnv() + cfg.Service = "middleware-service" + return cfg +} diff --git a/pkg/logger/context.go b/pkg/logger/context.go new file mode 100644 index 0000000..3eb52bf --- /dev/null +++ b/pkg/logger/context.go @@ -0,0 +1,142 @@ +package logger + +import ( + "context" + "time" +) + +// contextKey is a custom type for context keys to avoid collisions +type contextKey string + +const ( + loggerKey contextKey = "logger" + requestIDKey contextKey = "request_id" + correlationIDKey contextKey = "correlation_id" + serviceNameKey contextKey = "service_name" +) + +// ContextWithLogger creates a new context with the logger +func ContextWithLogger(ctx 
context.Context, logger *Logger) context.Context { + return context.WithValue(ctx, loggerKey, logger) +} + +// LoggerFromContext retrieves the logger from context +func LoggerFromContext(ctx context.Context) *Logger { + if logger, ok := ctx.Value(loggerKey).(*Logger); ok { + return logger + } + return globalLogger +} + +// ContextWithRequestID creates a new context with the request ID +func ContextWithRequestID(ctx context.Context, requestID string) context.Context { + return context.WithValue(ctx, requestIDKey, requestID) +} + +// RequestIDFromContext retrieves the request ID from context +func RequestIDFromContext(ctx context.Context) string { + if requestID, ok := ctx.Value(requestIDKey).(string); ok { + return requestID + } + return "" +} + +// ContextWithCorrelationID creates a new context with the correlation ID +func ContextWithCorrelationID(ctx context.Context, correlationID string) context.Context { + return context.WithValue(ctx, correlationIDKey, correlationID) +} + +// CorrelationIDFromContext retrieves the correlation ID from context +func CorrelationIDFromContext(ctx context.Context) string { + if correlationID, ok := ctx.Value(correlationIDKey).(string); ok { + return correlationID + } + return "" +} + +// ContextWithServiceName creates a new context with the service name +func ContextWithServiceName(ctx context.Context, serviceName string) context.Context { + return context.WithValue(ctx, serviceNameKey, serviceName) +} + +// ServiceNameFromContext retrieves the service name from context +func ServiceNameFromContext(ctx context.Context) string { + if serviceName, ok := ctx.Value(serviceNameKey).(string); ok { + return serviceName + } + return "" +} + +// WithContext returns a new logger with context values +func (l *Logger) WithContext(ctx context.Context) *Logger { + logger := l + + if requestID := RequestIDFromContext(ctx); requestID != "" { + logger = logger.WithRequestID(requestID) + } + + if correlationID := CorrelationIDFromContext(ctx); 
correlationID != "" { + logger = logger.WithCorrelationID(correlationID) + } + + if serviceName := ServiceNameFromContext(ctx); serviceName != "" { + logger = logger.WithService(serviceName) + } + + return logger +} + +// DebugCtx logs a debug message with context +func DebugCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Debug(msg, fields...) +} + +// DebugfCtx logs a formatted debug message with context +func DebugfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Debugf(format, args...) +} + +// InfoCtx logs an info message with context +func InfoCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Info(msg, fields...) +} + +// InfofCtx logs a formatted info message with context +func InfofCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Infof(format, args...) +} + +// WarnCtx logs a warning message with context +func WarnCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Warn(msg, fields...) +} + +// WarnfCtx logs a formatted warning message with context +func WarnfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Warnf(format, args...) +} + +// ErrorCtx logs an error message with context +func ErrorCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Error(msg, fields...) +} + +// ErrorfCtx logs a formatted error message with context +func ErrorfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Errorf(format, args...) 
+} + +// FatalCtx logs a fatal message with context and exits the program +func FatalCtx(ctx context.Context, msg string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Fatal(msg, fields...) +} + +// FatalfCtx logs a formatted fatal message with context and exits the program +func FatalfCtx(ctx context.Context, format string, args ...interface{}) { + LoggerFromContext(ctx).WithContext(ctx).Fatalf(format, args...) +} + +// LogDurationCtx logs the duration of an operation with context +func LogDurationCtx(ctx context.Context, start time.Time, operation string, fields ...map[string]interface{}) { + LoggerFromContext(ctx).WithContext(ctx).LogDuration(start, operation, fields...) +} diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go new file mode 100644 index 0000000..bcdd59e --- /dev/null +++ b/pkg/logger/logger.go @@ -0,0 +1,616 @@ +package logger + +import ( + "encoding/json" + "fmt" + "log" + "os" + "path/filepath" + "runtime" + "strings" + "sync" + "time" +) + +// LogLevel represents the severity level of a log message +type LogLevel int + +const ( + DEBUG LogLevel = iota + INFO + WARN + ERROR + FATAL +) + +var ( + levelStrings = map[LogLevel]string{ + DEBUG: "DEBUG", + INFO: "INFO", + WARN: "WARN", + ERROR: "ERROR", + FATAL: "FATAL", + } + + stringLevels = map[string]LogLevel{ + "DEBUG": DEBUG, + "INFO": INFO, + "WARN": WARN, + "ERROR": ERROR, + "FATAL": FATAL, + } +) + +// Logger represents a structured logger instance +type Logger struct { + serviceName string + level LogLevel + output *log.Logger + mu sync.Mutex + jsonFormat bool + + logDir string +} + +// LogEntry represents a structured log entry +type LogEntry struct { + Timestamp string `json:"timestamp"` + Level string `json:"level"` + Service string `json:"service"` + Message string `json:"message"` + RequestID string `json:"request_id,omitempty"` + CorrelationID string `json:"correlation_id,omitempty"` + File string `json:"file,omitempty"` + Line int 
`json:"line,omitempty"` + Duration string `json:"duration,omitempty"` + Fields map[string]interface{} `json:"fields,omitempty"` +} + +// New creates a new logger instance +func New(serviceName string, level LogLevel, jsonFormat bool, logDir ...string) *Logger { + // Tentukan direktori log berdasarkan prioritas: + // 1. Parameter logDir (jika disediakan) + // 2. Environment variable LOG_DIR (jika ada) + // 3. Default ke pkg/logger/data relatif terhadap root proyek + + var finalLogDir string + + // Cek apakah logDir disediakan sebagai parameter + if len(logDir) > 0 && logDir[0] != "" { + finalLogDir = logDir[0] + } else { + // Cek environment variable + if envLogDir := os.Getenv("LOG_DIR"); envLogDir != "" { + finalLogDir = envLogDir + } else { + // Default: dapatkan path relatif terhadap root proyek + // Dapatkan path executable + exePath, err := os.Executable() + if err != nil { + // Fallback ke current working directory jika gagal + finalLogDir = filepath.Join(".", "pkg", "logger", "data") + } else { + // Dapatkan direktori executable + exeDir := filepath.Dir(exePath) + + // Jika berjalan dengan go run, executable ada di temp directory + // Coba dapatkan path source code + if strings.Contains(exeDir, "go-build") || strings.Contains(exeDir, "tmp") { + // Gunakan runtime.Caller untuk mendapatkan path source + _, file, _, ok := runtime.Caller(0) + if ok { + // Dapatkan direktori source (2 level up dari pkg/logger) + sourceDir := filepath.Dir(file) + for i := 0; i < 3; i++ { // Naik 3 level ke root proyek + sourceDir = filepath.Dir(sourceDir) + } + finalLogDir = filepath.Join(sourceDir, "pkg", "logger", "data") + } else { + // Fallback + finalLogDir = filepath.Join(".", "pkg", "logger", "data") + } + } else { + // Untuk binary yang sudah dikompilasi, asumsikan struktur proyek + finalLogDir = filepath.Join(exeDir, "pkg", "logger", "data") + } + } + } + } + + // Konversi ke path absolut + absPath, err := filepath.Abs(finalLogDir) + if err == nil { + finalLogDir = 
absPath + } + + // Buat direktori jika belum ada + if err := os.MkdirAll(finalLogDir, 0755); err != nil { + // Fallback ke stdout jika gagal membuat direktori + fmt.Printf("Warning: Failed to create log directory %s: %v\n", finalLogDir, err) + return &Logger{ + serviceName: serviceName, + level: level, + output: log.New(os.Stdout, "", 0), + jsonFormat: jsonFormat, + logDir: "", // Kosongkan karena gagal + } + } + + return &Logger{ + serviceName: serviceName, + level: level, + output: log.New(os.Stdout, "", 0), + jsonFormat: jsonFormat, + logDir: finalLogDir, + } +} + +// NewFromConfig creates a new logger from configuration +func NewFromConfig(cfg Config) *Logger { + level := INFO + if l, exists := stringLevels[strings.ToUpper(cfg.Level)]; exists { + level = l + } + + return New(cfg.Service, level, cfg.JSONFormat) +} + +// Default creates a default logger instance +func Default() *Logger { + return New("api-service", INFO, false) +} + +// WithService returns a new logger with the specified service name +func (l *Logger) WithService(serviceName string) *Logger { + return &Logger{ + serviceName: serviceName, + level: l.level, + output: l.output, + jsonFormat: l.jsonFormat, + logDir: l.logDir, + } +} + +// SetLevel sets the log level for the logger +func (l *Logger) SetLevel(level LogLevel) { + l.mu.Lock() + defer l.mu.Unlock() + l.level = level +} + +// SetJSONFormat sets whether to output logs in JSON format +func (l *Logger) SetJSONFormat(jsonFormat bool) { + l.mu.Lock() + defer l.mu.Unlock() + l.jsonFormat = jsonFormat +} + +// Debug logs a debug message +func (l *Logger) Debug(msg string, fields ...map[string]interface{}) { + l.log(DEBUG, msg, nil, fields...) +} + +// Debugf logs a formatted debug message +func (l *Logger) Debugf(format string, args ...interface{}) { + l.log(DEBUG, fmt.Sprintf(format, args...), nil) +} + +// Info logs an info message +func (l *Logger) Info(msg string, fields ...map[string]interface{}) { + l.log(INFO, msg, nil, fields...) 
}

// Infof logs a formatted info message
func (l *Logger) Infof(format string, args ...interface{}) {
	l.log(INFO, fmt.Sprintf(format, args...), nil)
}

// Warn logs a warning message
func (l *Logger) Warn(msg string, fields ...map[string]interface{}) {
	l.log(WARN, msg, nil, fields...)
}

// Warnf logs a formatted warning message
func (l *Logger) Warnf(format string, args ...interface{}) {
	l.log(WARN, fmt.Sprintf(format, args...), nil)
}

// Error logs an error message
func (l *Logger) Error(msg string, fields ...map[string]interface{}) {
	l.log(ERROR, msg, nil, fields...)
}

// Errorf logs a formatted error message
func (l *Logger) Errorf(format string, args ...interface{}) {
	l.log(ERROR, fmt.Sprintf(format, args...), nil)
}

// Fatal logs a fatal message and exits the program
// NOTE(review): log() already calls os.Exit(1) when level == FATAL, so the
// os.Exit(1) below is unreachable. Harmless, but one of the two should go.
func (l *Logger) Fatal(msg string, fields ...map[string]interface{}) {
	l.log(FATAL, msg, nil, fields...)
	os.Exit(1)
}

// Fatalf logs a formatted fatal message and exits the program
// NOTE(review): same unreachable os.Exit(1) as Fatal above.
func (l *Logger) Fatalf(format string, args ...interface{}) {
	l.log(FATAL, fmt.Sprintf(format, args...), nil)
	os.Exit(1)
}

// WithRequestID returns a new logger with the specified request ID
// NOTE(review): withField discards its key/value arguments (see withField),
// so this is currently a no-op — the request ID never reaches the output.
func (l *Logger) WithRequestID(requestID string) *Logger {
	return l.withField("request_id", requestID)
}

// WithCorrelationID returns a new logger with the specified correlation ID
// NOTE(review): no-op for the same reason as WithRequestID.
func (l *Logger) WithCorrelationID(correlationID string) *Logger {
	return l.withField("correlation_id", correlationID)
}

// WithField returns a new logger with an additional field
// NOTE(review): no-op for the same reason as WithRequestID.
func (l *Logger) WithField(key string, value interface{}) *Logger {
	return l.withField(key, value)
}

// WithFields returns a new logger with additional fields
// NOTE(review): BUG — the fields argument is never stored; this returns a
// plain copy of the logger. Fixing it requires a fields map on the Logger
// struct that log() merges into each entry.
func (l *Logger) WithFields(fields map[string]interface{}) *Logger {
	return &Logger{
		serviceName: l.serviceName,
		level:       l.level,
		output:      l.output,
		jsonFormat:  l.jsonFormat,
		logDir:      l.logDir,
	}
}

// LogDuration logs the duration
of an operation
func (l *Logger) LogDuration(start time.Time, operation string, fields ...map[string]interface{}) {
	// Elapsed wall-clock time since start.
	duration := time.Since(start)
	// The duration values are appended as one extra fields map so any
	// caller-supplied fields survive the merge performed by log().
	l.Info(fmt.Sprintf("%s completed", operation), append(fields, map[string]interface{}{
		"duration":    duration.String(),
		"duration_ms": duration.Milliseconds(),
	})...)
}

// log is the internal logging method shared by the level helpers.
// It filters by the configured minimum level, captures caller info, merges
// the variadic field maps, renders the entry (JSON or text) and terminates
// the process for FATAL entries.
//
// NOTE(review): runtime.Caller(3) assumes a fixed call depth of
// helper -> log -> Caller; calling log directly (as the request-logger
// middleware does) reports a different frame — confirm intended.
func (l *Logger) log(level LogLevel, msg string, duration *time.Duration, fields ...map[string]interface{}) {
	// Drop entries below the configured threshold.
	if level < l.level {
		return
	}

	// Get caller information.
	_, file, line, ok := runtime.Caller(3) // Adjust caller depth
	var callerFile string
	var callerLine int
	if ok {
		// Shorten the file path to its last two segments for readability.
		parts := strings.Split(file, "/")
		if len(parts) > 2 {
			callerFile = strings.Join(parts[len(parts)-2:], "/")
		} else {
			callerFile = file
		}
		callerLine = line
	}

	// Merge all variadic field maps; later maps win on key collisions.
	mergedFields := make(map[string]interface{})
	for _, f := range fields {
		for k, v := range f {
			mergedFields[k] = v
		}
	}

	entry := LogEntry{
		Timestamp: time.Now().Format(time.RFC3339),
		Level:     levelStrings[level],
		Service:   l.serviceName,
		Message:   msg,
		File:      callerFile,
		Line:      callerLine,
		Fields:    mergedFields,
	}

	if duration != nil {
		entry.Duration = duration.String()
	}

	if l.jsonFormat {
		l.outputJSON(entry)
	} else {
		l.outputText(entry)
	}

	// FATAL entries terminate the process after being emitted.
	if level == FATAL {
		os.Exit(1)
	}
}

// outputJSON outputs the log entry in JSON format.
func (l *Logger) outputJSON(entry LogEntry) {
	jsonData, err := json.Marshal(entry)
	if err != nil {
		// Fallback to text output if JSON marshaling fails.
		l.outputText(entry)
		return
	}
	l.output.Println(string(jsonData))
}

// outputText outputs the log entry in human-readable text format.
func (l *Logger) outputText(entry LogEntry) {
	timestamp := entry.Timestamp
	level := entry.Level
	service := entry.Service
	message := entry.Message

	// Base log line
	logLine := fmt.Sprintf("%s [%s] %s: %s", timestamp, level, service, 
message)

	// Append caller location when known.
	if entry.File != "" && entry.Line > 0 {
		logLine += fmt.Sprintf(" (%s:%d)", entry.File, entry.Line)
	}

	// Append request ID if available.
	if entry.RequestID != "" {
		logLine += fmt.Sprintf(" [req:%s]", entry.RequestID)
	}

	// Append correlation ID if available.
	if entry.CorrelationID != "" {
		logLine += fmt.Sprintf(" [corr:%s]", entry.CorrelationID)
	}

	// Append duration if available.
	if entry.Duration != "" {
		logLine += fmt.Sprintf(" [dur:%s]", entry.Duration)
	}

	// Append remaining structured fields as key=value pairs.
	// NOTE(review): map iteration order is random, so field order in the
	// rendered line varies between calls.
	if len(entry.Fields) > 0 {
		fields := make([]string, 0, len(entry.Fields))
		for k, v := range entry.Fields {
			fields = append(fields, fmt.Sprintf("%s=%v", k, v))
		}
		logLine += " [" + strings.Join(fields, " ") + "]"
	}

	l.output.Println(logLine)
}

// withField creates a new logger with an additional field.
// NOTE(review): key and value are accepted but never stored — the returned
// logger only copies the base configuration, so per-request fields such as
// request_id are lost. Confirm whether Logger needs a fields member.
func (l *Logger) withField(key string, value interface{}) *Logger {
	return &Logger{
		serviceName: l.serviceName,
		level:       l.level,
		output:      l.output,
		jsonFormat:  l.jsonFormat,
		logDir:      l.logDir,
	}
}

// String returns the string representation of a log level.
func (l LogLevel) String() string {
	return levelStrings[l]
}

// ParseLevel parses a string (case-insensitive) into a LogLevel.
// On failure it returns INFO together with a non-nil error.
func ParseLevel(level string) (LogLevel, error) {
	if l, exists := stringLevels[strings.ToUpper(level)]; exists {
		return l, nil
	}
	return INFO, fmt.Errorf("invalid log level: %s", level)
}

// Global logger instance used by the package-level helpers below.
var globalLogger = Default()

// SetGlobalLogger sets the global logger instance.
func SetGlobalLogger(logger *Logger) {
	globalLogger = logger
}

// Global logging functions delegating to globalLogger.
func Debug(msg string, fields ...map[string]interface{}) {
	globalLogger.Debug(msg, fields...)
}

func Debugf(format string, args ...interface{}) {
	globalLogger.Debugf(format, args...)
}

func Info(msg string, fields ...map[string]interface{}) {
	globalLogger.Info(msg, fields...)
+} + +func Infof(format string, args ...interface{}) { + globalLogger.Infof(format, args...) +} + +func Warn(msg string, fields ...map[string]interface{}) { + globalLogger.Warn(msg, fields...) +} + +func Warnf(format string, args ...interface{}) { + globalLogger.Warnf(format, args...) +} + +func Error(msg string, fields ...map[string]interface{}) { + globalLogger.Error(msg, fields...) +} + +func Errorf(format string, args ...interface{}) { + globalLogger.Errorf(format, args...) +} + +func Fatal(msg string, fields ...map[string]interface{}) { + globalLogger.Fatal(msg, fields...) +} + +func Fatalf(format string, args ...interface{}) { + globalLogger.Fatalf(format, args...) +} + +// SaveLogText menyimpan log dalam format teks dengan pemisah | +func (l *Logger) SaveLogText(entry LogEntry) error { + // Format log dengan pemisah | + logLine := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%s|%s:%d", + entry.Timestamp, + entry.Level, + entry.Service, + entry.Message, + entry.RequestID, + entry.CorrelationID, + entry.Duration, + entry.File, + entry.Line) + + // Tambahkan fields jika ada + if len(entry.Fields) > 0 { + fieldsStr := "" + for k, v := range entry.Fields { + fieldsStr += fmt.Sprintf("|%s=%v", k, v) + } + logLine += fieldsStr + } + logLine += "\n" + + // Buat direktori jika belum ada + if err := os.MkdirAll(l.logDir, 0755); err != nil { + return err + } + + // Tulis ke file dengan mutex lock untuk concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "logs.txt") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(logLine); err != nil { + return err + } + return nil +} + +// SaveLogJSON menyimpan log dalam format JSON +func (l *Logger) SaveLogJSON(entry LogEntry) error { + jsonData, err := json.Marshal(entry) + if err != nil { + return err + } + + // Buat direktori jika belum ada + if err := os.MkdirAll(l.logDir, 0755); err != nil 
{ + return err + } + + // Tulis ke file dengan mutex lock for concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "logs.json") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(string(jsonData) + "\n"); err != nil { + return err + } + return nil +} + +// SaveLogToDatabase menyimpan log ke database +func (l *Logger) SaveLogToDatabase(entry LogEntry) error { + // Implementasi penyimpanan ke database + // Ini adalah contoh implementasi, sesuaikan dengan struktur database Anda + + // Untuk saat ini, kita akan simpan ke file sebagai placeholder + // Anda dapat mengganti ini dengan koneksi database yang sesuai + dbLogLine := fmt.Sprintf("DB_LOG: %s|%s|%s|%s\n", + entry.Timestamp, entry.Level, entry.Service, entry.Message) + + if err := os.MkdirAll(l.logDir, 0755); err != nil { + return err + } + + // Tulis ke file dengan mutex lock for concurrency safety + l.mu.Lock() + defer l.mu.Unlock() + + filePath := filepath.Join(l.logDir, "database_logs.txt") + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.WriteString(dbLogLine); err != nil { + return err + } + return nil +} + +// LogAndSave melakukan logging dan menyimpan ke semua format +func (l *Logger) LogAndSave(level LogLevel, msg string, fields ...map[string]interface{}) { + // Panggil fungsi log biasa + l.log(level, msg, nil, fields...) 

	// Rebuild the entry that log() just emitted (caller info, merged
	// fields, timestamp) so it can be persisted.
	// NOTE(review): Caller(2) here vs Caller(3) inside log() — the
	// persisted file/line may differ from the emitted entry; confirm depth.
	_, file, line, ok := runtime.Caller(2)
	var callerFile string
	var callerLine int
	if ok {
		parts := strings.Split(file, "/")
		if len(parts) > 2 {
			callerFile = strings.Join(parts[len(parts)-2:], "/")
		} else {
			callerFile = file
		}
		callerLine = line
	}

	mergedFields := make(map[string]interface{})
	for _, f := range fields {
		for k, v := range f {
			mergedFields[k] = v
		}
	}

	entry := LogEntry{
		Timestamp: time.Now().Format(time.RFC3339),
		Level:     levelStrings[level],
		Service:   l.serviceName,
		Message:   msg,
		File:      callerFile,
		Line:      callerLine,
		Fields:    mergedFields,
	}

	// Persist to every sink asynchronously.
	// NOTE(review): errors from all three Save* calls are silently
	// discarded and the goroutine is unsupervised (no WaitGroup/ctx) —
	// confirm this best-effort behavior is intended.
	go func() {
		l.SaveLogText(entry)
		l.SaveLogJSON(entry)
		l.SaveLogToDatabase(entry)
	}()
}

// Package-level helpers that persist entries via the global logger.
func SaveLogText(entry LogEntry) error {
	return globalLogger.SaveLogText(entry)
}

func SaveLogJSON(entry LogEntry) error {
	return globalLogger.SaveLogJSON(entry)
}

func SaveLogToDatabase(entry LogEntry) error {
	return globalLogger.SaveLogToDatabase(entry)
}
diff --git a/pkg/logger/middleware.go b/pkg/logger/middleware.go
new file mode 100644
index 0000000..d063a83
--- /dev/null
+++ b/pkg/logger/middleware.go
@@ -0,0 +1,191 @@
package logger

import (
	"bytes"
	"io"
	"strings"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/google/uuid"
)

// RequestLoggerMiddleware creates a Gin middleware that tags every request
// with request/correlation IDs and logs its start and completion.
func RequestLoggerMiddleware(logger *Logger) gin.HandlerFunc {
	return func(c *gin.Context) {
		// Generate a request ID if the client did not supply one; echo it
		// back in the response header either way.
		requestID := c.GetHeader("X-Request-ID")
		if requestID == "" {
			requestID = uuid.New().String()
			c.Header("X-Request-ID", requestID)
		}

		// Get or generate the correlation ID, mirrored the same way.
		correlationID := c.GetHeader("X-Correlation-ID")
		if correlationID == "" {
			correlationID = uuid.New().String()
			c.Header("X-Correlation-ID", correlationID)
		}

		// Create request-scoped logger.
		reqLogger := logger.
			WithRequestID(requestID).
			WithCorrelationID(correlationID)

		// Store logger and IDs in the Gin context for downstream handlers.
		c.Set("logger", reqLogger)
		c.Set("request_id", requestID)
		c.Set("correlation_id", correlationID)

		// Capture the request body for logging if needed.
		// NOTE(review): the body is buffered only for application/json
		// requests, and read errors are deliberately ignored (best effort).
		var requestBody []byte
		if c.Request.Body != nil && strings.HasPrefix(c.ContentType(), "application/json") {
			requestBody, _ = io.ReadAll(c.Request.Body)
			// Restore the body so handlers can read it again.
			c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody))
		}

		// Start timer
		start := time.Now()

		// Log request start
		reqLogger.Info("Request started", map[string]interface{}{
			"method":       c.Request.Method,
			"path":         c.Request.URL.Path,
			"query":        c.Request.URL.RawQuery,
			"remote_addr":  c.Request.RemoteAddr,
			"user_agent":   c.Request.UserAgent(),
			"content_type": c.ContentType(),
			"body_size":    len(requestBody),
		})

		// Process request
		c.Next()

		// Calculate duration
		duration := time.Since(start)

		// Get response status
		status := c.Writer.Status()
		responseSize := c.Writer.Size()

		// Choose the log level from the response status code:
		// 5xx -> ERROR, 4xx -> WARN, otherwise INFO.
		var logLevel LogLevel
		switch {
		case status >= 500:
			logLevel = ERROR
		case status >= 400:
			logLevel = WARN
		default:
			logLevel = INFO
		}

		// Log request completion
		fields := map[string]interface{}{
			"method":         c.Request.Method,
			"path":           c.Request.URL.Path,
			"status":         status,
			"duration":       duration.String(),
			"duration_ms":    duration.Milliseconds(),
			"response_size":  responseSize,
			"client_ip":      c.ClientIP(),
			"user_agent":     c.Request.UserAgent(),
			"content_type":   c.ContentType(),
			"content_length": c.Request.ContentLength,
		}

		// Add query parameters if present
		if c.Request.URL.RawQuery != "" {
			fields["query"] = c.Request.URL.RawQuery
		}

		// Add error information if present
		if len(c.Errors) > 0 {
			errors := make([]string, len(c.Errors))
			for i, err := range c.Errors {
				errors[i] = err.Error()
			}
			fields["errors"] = errors
		}

		// NOTE(review): calling the unexported log directly bypasses the
		// level-helper wrappers, so runtime.Caller(3) inside log reports a
		// different frame than usual — confirm intended.
		reqLogger.log(logLevel, "Request completed", &duration, fields)
	}
}

// GetLoggerFromContext retrieves the request-scoped logger from the Gin
// context, falling back to the global logger when none was stored.
func GetLoggerFromContext(c *gin.Context) *Logger {
	if logger, exists := c.Get("logger"); exists {
		if l, ok := logger.(*Logger); ok {
			return l
		}
	}
	return globalLogger
}

// GetRequestIDFromContext retrieves the request ID from the Gin context,
// or "" when absent.
func GetRequestIDFromContext(c *gin.Context) string {
	if requestID, exists := c.Get("request_id"); exists {
		if id, ok := requestID.(string); ok {
			return id
		}
	}
	return ""
}

// GetCorrelationIDFromContext retrieves the correlation ID from the Gin
// context, or "" when absent.
func GetCorrelationIDFromContext(c *gin.Context) string {
	if correlationID, exists := c.Get("correlation_id"); exists {
		if id, ok := correlationID.(string); ok {
			return id
		}
	}
	return ""
}

// DatabaseLoggerMiddleware creates middleware that stores a service-tagged
// logger under "db_logger" for database operation logging.
// NOTE(review): the logger parameter is never used — the logger is taken
// from the context instead; confirm whether the parameter should be dropped
// or used as the base.
func DatabaseLoggerMiddleware(logger *Logger, serviceName string) gin.HandlerFunc {
	return func(c *gin.Context) {
		reqLogger := GetLoggerFromContext(c).WithService(serviceName)
		c.Set("db_logger", reqLogger)
		c.Next()
	}
}

// GetDBLoggerFromContext retrieves the database logger from the Gin context,
// falling back to the request-scoped logger.
func GetDBLoggerFromContext(c *gin.Context) *Logger {
	if logger, exists := c.Get("db_logger"); exists {
		if l, ok := logger.(*Logger); ok {
			return l
		}
	}
	return GetLoggerFromContext(c)
}

// ServiceLogger creates a service-specific logger.
func ServiceLogger(serviceName string) *Logger {
	return globalLogger.WithService(serviceName)
}

// AuthServiceLogger returns a logger for the auth service.
func AuthServiceLogger() *Logger {
	return ServiceLogger("auth-service")
}

// BPJSServiceLogger returns a logger for the BPJS service.
func BPJSServiceLogger() *Logger {
	return ServiceLogger("bpjs-service")
}

// RetribusiServiceLogger returns a logger for the retribusi service.
func RetribusiServiceLogger() *Logger {
	return ServiceLogger("retribusi-service")
}

// 
DatabaseServiceLogger returns a logger for database operations
func DatabaseServiceLogger() *Logger {
	return ServiceLogger("database-service")
}

// MiddlewareServiceLogger returns a logger for middleware operations.
func MiddlewareServiceLogger() *Logger {
	return ServiceLogger("middleware-service")
}
diff --git a/pkg/utils/etag.go b/pkg/utils/etag.go
new file mode 100644
index 0000000..eeba954
--- /dev/null
+++ b/pkg/utils/etag.go
@@ -0,0 +1,54 @@
package utils

import (
	"fmt"
	"strings"
)

// ParseETag extracts the ETag value from an HTTP ETag header.
// Handles both strong ETags ("123") and weak ETags (W/"123").
func ParseETag(etag string) string {
	if etag == "" {
		return ""
	}

	// Remove W/ prefix for weak ETags.
	if strings.HasPrefix(etag, "W/") {
		etag = etag[2:]
	}

	// Remove surrounding quotes.
	if len(etag) >= 2 && strings.HasPrefix(etag, "\"") && strings.HasSuffix(etag, "\"") {
		etag = etag[1 : len(etag)-1]
	}

	return etag
}

// FormatETag formats a version ID into a proper HTTP ETag header value;
// weak selects the W/"..." form. An empty versionId yields "".
func FormatETag(versionId string, weak bool) string {
	if versionId == "" {
		return ""
	}

	if weak {
		return fmt.Sprintf(`W/"%s"`, versionId)
	}

	return fmt.Sprintf(`"%s"`, versionId)
}

// IsValidETag reports whether the given string looks like a well-formed
// ETag (optionally weak, surrounded by double quotes).
// NOTE(review): only the outer quotes are checked — the tag contents are
// not validated against RFC 7232; confirm whether stricter validation is
// needed.
func IsValidETag(etag string) bool {
	if etag == "" {
		return false
	}

	// Strip the weak prefix before checking the quoted form.
	if strings.HasPrefix(etag, "W/") {
		etag = etag[2:]
	}

	// Must be quoted.
	return len(etag) >= 2 && strings.HasPrefix(etag, "\"") && strings.HasSuffix(etag, "\"")
}
diff --git a/pkg/utils/utils b/pkg/utils/utils
new file mode 100644
index 0000000..e69de29
diff --git a/pkg/validator/validator b/pkg/validator/validator
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pkg/validator/validator
@@ -0,0 +1 @@

diff --git a/scripts/scripts b/scripts/scripts
new file mode 100644
index 0000000..e69de29
diff --git 
a/tools/bpjs/generate-handler.go b/tools/bpjs/generate-handler.go new file mode 100644 index 0000000..77cfe32 --- /dev/null +++ b/tools/bpjs/generate-handler.go @@ -0,0 +1,3176 @@ +package main + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strings" + "text/template" + "time" + + "gopkg.in/yaml.v2" +) + +// Enhanced structures untuk validasi +type HandlerValidation struct { + ExistingFunctions map[string]bool + NewFunctions []string + UpdatedFiles []string + CreatedFiles []string +} + +type DirectoryInfo struct { + Path string + IsFile bool + Functions []FunctionInfo + Children map[string]*DirectoryInfo +} + +type FunctionInfo struct { + Name string + Methods []string + Endpoint string + Config EndpointConfig +} + +type EndpointConfig struct { + Methods []string `yaml:"methods"` + GetRoutes string `yaml:"get_routes"` + PostRoutes string `yaml:"post_routes"` + PutRoutes string `yaml:"put_routes"` + DeleteRoutes string `yaml:"delete_routes"` + GetPath string `yaml:"get_path"` + PostPath string `yaml:"post_path"` + PutPath string `yaml:"put_path"` + DeletePath string `yaml:"delete_path"` + Model string `yaml:"model"` + ResponseModel string `yaml:"response_model"` + RequestModel string `yaml:"request_model"` + Description string `yaml:"description"` + Summary string `yaml:"summary"` + Tags []string `yaml:"tags"` + RequireAuth bool `yaml:"require_auth"` + CacheEnabled bool `yaml:"cache_enabled"` + CacheTTL int `yaml:"cache_ttl"` +} + +type GlobalConfig struct { + ModuleName string `yaml:"module_name"` + OutputDir string `yaml:"output_dir"` + EnableSwagger bool `yaml:"enable_swagger"` + EnableLogging bool `yaml:"enable_logging"` +} + +type ServiceConfig struct { + Global GlobalConfig `yaml:"global,omitempty"` + Services map[string]Service `yaml:"services"` +} + +type Service struct { + Name string `yaml:"name"` + Category string `yaml:"category"` + Package string `yaml:"package"` + Description string 
`yaml:"description"` + BaseURL string `yaml:"base_url"` + Timeout int `yaml:"timeout"` + RetryCount int `yaml:"retry_count"` + Endpoints map[string]EndpointGroup `yaml:"endpoints"` + Dependencies string `yaml:"dependencies,omitempty"` + Middleware string `yaml:"middleware,omitempty"` +} + +type EndpointGroup struct { + Description string `yaml:"description"` + HandlerFolder string `yaml:"handler_folder"` + HandlerFile string `yaml:"handler_file"` + HandlerName string `yaml:"handler_name"` + Functions map[string]FunctionConfig `yaml:"functions"` +} + +type FunctionConfig struct { + Methods []string `yaml:"methods"` + Path string `yaml:"path"` + Model string `yaml:"model"` + RoutesLink string `yaml:"routes_link"` + // Routes untuk endpoint generation + GetRoutes string `yaml:"get_routes"` + PostRoutes string `yaml:"post_routes"` + PutRoutes string `yaml:"put_routes"` + DeleteRoutes string `yaml:"delete_routes"` + // โœ… Path untuk swagger documentation + GetPath string `yaml:"get_path"` + PostPath string `yaml:"post_path"` + PutPath string `yaml:"put_path"` + DeletePath string `yaml:"delete_path"` + ResponseModel string `yaml:"response_model"` + RequestModel string `yaml:"request_model"` + Description string `yaml:"description"` + Summary string `yaml:"summary"` + Tags []string `yaml:"tags"` + RequireAuth bool `yaml:"require_auth"` + CacheEnabled bool `yaml:"cache_enabled"` + CacheTTL int `yaml:"cache_ttl"` +} + +type TemplateData struct { + ServiceName string + ServiceLower string + ServiceUpper string + Category string + Package string + Description string + BaseURL string + Timeout int + RetryCount int + Endpoints []EndpointData + Timestamp string + ModuleName string + HasValidator bool + HasLogger bool + HasSwagger bool + GlobalConfig GlobalConfig + ShouldGenerateStruct bool + ShouldGenerateConstructor bool + FunctionalArea string // e.g. 
"rujukan", "search" + HandlerName string +} + +type EndpointData struct { + Name string + NameLower string + NameUpper string + NameCamel string + Methods []string + GetRoutes string + PostRoutes string + PutRoutes string + DeleteRoutes string + GetPath string + PostPath string + PutPath string + DeletePath string + Model string + ResponseModel string + RequestModel string + DataModel string + Description string + Summary string + Tags []string + HasGet bool + HasPost bool + HasPut bool + HasDelete bool + RequireAuth bool + CacheEnabled bool + CacheTTL int + PathParams []string + ModelPackage string +} + +// Fungsi utama yang diupdate untuk menggunakan separation of concerns +func generateHandlerWithValidation(serviceName string, svc Service, gc GlobalConfig) error { + // Step 1: Generate base handler file (struct + constructor) - SEKALI SAJA + // err := generateBaseHandlerFile(serviceName, svc, gc) + // if err != nil { + // return fmt.Errorf("generate base handler: %w", err) + // } + + // Step 2: Generate methods files per endpoint group + baseDir := gc.OutputDir + for groupName, grp := range svc.Endpoints { + folder := filepath.Join(baseDir, grp.HandlerFolder) + if err := os.MkdirAll(folder, 0755); err != nil { + return fmt.Errorf("mkdir %s: %w", folder, err) + } + + // Generate methods file dengan naming yang jelas + methodsFileName := fmt.Sprintf("%s.go", strings.ToLower(groupName)) + methodsFilePath := filepath.Join(folder, methodsFileName) + + // Check if methods file exists + fileExists := false + if _, err := os.Stat(methodsFilePath); err == nil { + fileExists = true + } + + if !fileExists { + // Create new methods file + err := createMethodsFileFromConfig(methodsFilePath, svc, grp, gc, groupName) + if err != nil { + return fmt.Errorf("create methods file %s: %w", methodsFilePath, err) + } + fmt.Printf("โœ… Created methods file: %s\n", methodsFilePath) + } else { + // Update existing methods file with new functions only + err := 
updateExistingMethodsFile(methodsFilePath, svc, grp, gc, groupName) + if err != nil { + return fmt.Errorf("update methods file %s: %w", methodsFilePath, err) + } + fmt.Printf("โœ… Updated methods file: %s\n", methodsFilePath) + } + } + // โœ… Step 2: Generate routes + err := generateRoutes(serviceName, svc, gc) + if err != nil { + return fmt.Errorf("generate routes: %w", err) + } + + return nil +} + +// Create new methods file +func createMethodsFileFromConfig(filePath string, svc Service, grp EndpointGroup, gc GlobalConfig, functionalArea string) error { + // Collect all functions for this group + var allEndpoints []EndpointData + for fname, fcfg := range grp.Functions { + td := processFunctionData(svc, grp, fname, fcfg, gc) + allEndpoints = append(allEndpoints, td.Endpoints...) + } + + templateData := TemplateData{ + ServiceName: svc.Name, + ServiceLower: strings.ToLower(svc.Name), + ServiceUpper: strings.ToUpper(svc.Name), + Category: svc.Category, + Package: grp.HandlerFolder, + Description: svc.Description, + BaseURL: svc.BaseURL, + Timeout: svc.Timeout, + RetryCount: svc.RetryCount, + Endpoints: allEndpoints, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + ModuleName: gc.ModuleName, + HasValidator: true, + HasLogger: gc.EnableLogging, + HasSwagger: gc.EnableSwagger, + GlobalConfig: gc, + ShouldGenerateStruct: false, // NEVER generate struct in methods file + ShouldGenerateConstructor: false, // NEVER generate constructor in methods file + FunctionalArea: functionalArea, + HandlerName: grp.HandlerName, // PERBAIKAN: Pastikan HandlerName diset + } + + return createMethodsFile(filePath, templateData) +} + +// Create methods file using methods-only template +func createMethodsFile(filePath string, templateData TemplateData) error { + if err := os.MkdirAll(filepath.Dir(filePath), 0755); err != nil { + return err + } + + tmpl := template.New("methods").Funcs(template.FuncMap{ + "title": strings.Title, + "index": func(slice []string, i int) string { + if i 
>= 0 && i < len(slice) { + return slice[i] + } + return "" + }, + }) + + tmpl, err := tmpl.Parse(handlerTemplate) + if err != nil { + return err + } + + file, err := os.Create(filePath) + if err != nil { + return err + } + defer file.Close() + + return tmpl.Execute(file, templateData) +} + +// Update existing methods file (sama seperti sebelumnya tapi tanpa struct) +func updateExistingMethodsFile(filePath string, svc Service, grp EndpointGroup, gc GlobalConfig, functionalArea string) error { + existingContent, err := ioutil.ReadFile(filePath) + if err != nil { + return err + } + + content := string(existingContent) + // Check for existing functions and collect new ones + var newEndpoints []EndpointData + + for fname, fcfg := range grp.Functions { + functionExists := false + for _, method := range fcfg.Methods { + funcName := generateFunctionName(fname, method) + // PERBAIKAN: Gunakan grp.HandlerName bukan svc.Name + sig := fmt.Sprintf("func (h *%sHandler) %s", grp.HandlerName, funcName) + if strings.Contains(content, sig) { + fmt.Printf("โš ๏ธ Skip existing: %s (%s)\n", fname, funcName) + functionExists = true + break + } + } + + if functionExists { + continue + } + + td := processFunctionData(svc, grp, fname, fcfg, gc) + newEndpoints = append(newEndpoints, td.Endpoints...) 
+ fmt.Printf("โœ… Will add: %s\n", fname) + } + + if len(newEndpoints) == 0 { + fmt.Printf("โญ๏ธ No new functions to add\n") + return nil + } + + // Generate new functions using methods-only template + templateData := TemplateData{ + ServiceName: svc.Name, + ServiceLower: strings.ToLower(svc.Name), + ServiceUpper: strings.ToUpper(svc.Name), + Category: svc.Category, + Package: grp.HandlerFolder, + Description: svc.Description, + BaseURL: svc.BaseURL, + Timeout: svc.Timeout, + RetryCount: svc.RetryCount, + Endpoints: newEndpoints, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + ModuleName: gc.ModuleName, + HasValidator: true, + HasLogger: gc.EnableLogging, + HasSwagger: gc.EnableSwagger, + GlobalConfig: gc, + ShouldGenerateStruct: false, // NEVER + ShouldGenerateConstructor: false, // NEVER + FunctionalArea: functionalArea, + HandlerName: grp.HandlerName, // PERBAIKAN: Pastikan HandlerName diset + } + + newFunctions, err := generateNewMethodsOnly(templateData) + if err != nil { + return err + } + + // Merge content + mergedContent := mergeGoFileContent(content, newFunctions) + return ioutil.WriteFile(filePath, []byte(mergedContent), 0644) +} + +// Generate new methods using methods-only template +func generateNewMethodsOnly(templateData TemplateData) (string, error) { + tmpl := template.New("newMethods") + tmpl, err := tmpl.Parse(handlerTemplate) + if err != nil { + return "", err + } + + var result strings.Builder + err = tmpl.Execute(&result, templateData) + if err != nil { + return "", err + } + + return result.String(), nil +} + +// Helper toCamelCase +func toCamelCase(s string) string { + parts := strings.FieldsFunc(s, func(r rune) bool { + return r == '_' || r == '-' || r == ' ' + }) + for i, p := range parts { + parts[i] = strings.Title(strings.ToLower(p)) + } + return strings.Join(parts, "") +} + +// Parse struktur direktori dari endpoints YAML +func parseDirectoryStructure(endpoints map[string]interface{}, serviceName string) *DirectoryInfo { + 
root := &DirectoryInfo{ + Path: "", + Children: make(map[string]*DirectoryInfo), + } + + for key, value := range endpoints { + parseNestedEndpoints(root, key, value) + } + + return root +} + +func parseNestedEndpoints(parent *DirectoryInfo, name string, value interface{}) { + switch v := value.(type) { + case map[string]interface{}: + // Check if this contains direct endpoint config + if isDirectEndpoint(v) { + // This is a direct endpoint - create as file + parent.Children[name] = &DirectoryInfo{ + Path: name, + IsFile: true, + Functions: []FunctionInfo{parseEndpointToFunction(name, v)}, + Children: make(map[string]*DirectoryInfo), + } + } else { + // This is nested structure - create as directory or file + child := &DirectoryInfo{ + Path: name, + IsFile: false, + Functions: make([]FunctionInfo, 0), + Children: make(map[string]*DirectoryInfo), + } + + // Check if any direct children are endpoints + hasDirectEndpoints := false + for childName, childValue := range v { + if childMap, ok := childValue.(map[string]interface{}); ok && isDirectEndpoint(childMap) { + hasDirectEndpoints = true + child.Functions = append(child.Functions, parseEndpointToFunction(childName, childMap)) + } + } + + if hasDirectEndpoints { + child.IsFile = true + } + + parent.Children[name] = child + + // Recursively parse nested children + for childName, childValue := range v { + if childMap, ok := childValue.(map[string]interface{}); ok && !isDirectEndpoint(childMap) { + parseNestedEndpoints(child, childName, childValue) + } + } + } + } +} + +func isDirectEndpoint(m map[string]interface{}) bool { + _, hasMethods := m["methods"] + _, hasGetPath := m["get_path"] + _, hasPostPath := m["post_path"] + return hasMethods || hasGetPath || hasPostPath +} + +func parseEndpointToFunction(name string, config map[string]interface{}) FunctionInfo { + function := FunctionInfo{ + Name: name, + Endpoint: name, + Methods: make([]string, 0), + } + + if methods, ok := config["methods"].([]interface{}); ok { + for 
_, method := range methods { + if methodStr, ok := method.(string); ok { + function.Methods = append(function.Methods, strings.ToUpper(strings.TrimSpace(methodStr))) + } + } + } else if methodStr, ok := config["methods"].(string); ok { + // Handle case where methods is a string like "GET,POST" + methods := strings.Split(methodStr, ",") + for _, method := range methods { + function.Methods = append(function.Methods, strings.ToUpper(strings.TrimSpace(method))) + } + } + + return function +} + +// Process directory structure dan generate files +func processDirectoryStructure(baseDir string, dirInfo *DirectoryInfo, service Service, globalConfig GlobalConfig, validation *HandlerValidation) error { + for name, child := range dirInfo.Children { + currentPath := filepath.Join(baseDir, child.Path) + + if child.IsFile { + // Process as file + err := processHandlerFile(currentPath, name, child, service, globalConfig, validation) + if err != nil { + return fmt.Errorf("failed to process file %s: %w", name, err) + } + } else { + // Create directory dan process children + if err := os.MkdirAll(currentPath, 0755); err != nil { + return fmt.Errorf("failed to create directory %s: %w", currentPath, err) + } + + err := processDirectoryStructure(currentPath, child, service, globalConfig, validation) + if err != nil { + return err + } + } + } + + return nil +} +func processFunctionData(svc Service, grp EndpointGroup, fname string, fcfg FunctionConfig, gc GlobalConfig) TemplateData { + ed := EndpointData{ + Name: toCamelCase(fname), + NameLower: strings.ToLower(fname), + NameUpper: strings.ToUpper(fname), + NameCamel: toCamelCase(fname), + Methods: fcfg.Methods, + GetRoutes: fcfg.GetRoutes, + PostRoutes: fcfg.PostRoutes, + PutRoutes: fcfg.PutRoutes, + DeleteRoutes: fcfg.DeleteRoutes, + GetPath: fcfg.Path, + PostPath: fcfg.Path, + PutPath: fcfg.Path, + DeletePath: fcfg.Path, + Model: fcfg.Model, + ResponseModel: fcfg.ResponseModel, + RequestModel: fcfg.RequestModel, + DataModel: 
strings.Replace(fcfg.ResponseModel, "Response", "Data", 1), + Description: fcfg.Description, + Summary: fcfg.Summary, + Tags: fcfg.Tags, + RequireAuth: fcfg.RequireAuth, + CacheEnabled: fcfg.CacheEnabled, + CacheTTL: fcfg.CacheTTL, + PathParams: extractPathParams(fcfg.Path), + ModelPackage: grp.HandlerFolder, + } + // set flags + for _, m := range fcfg.Methods { + switch strings.ToUpper(m) { + case "GET": + ed.HasGet = true + case "POST": + ed.HasPost = true + case "PUT": + ed.HasPut = true + case "DELETE": + ed.HasDelete = true + } + } + return TemplateData{ + ServiceName: svc.Name, + ServiceLower: strings.ToLower(svc.Name), + ServiceUpper: strings.ToUpper(svc.Name), + Category: svc.Category, + Package: grp.HandlerFolder, + Description: svc.Description, + BaseURL: svc.BaseURL, + Timeout: svc.Timeout, + RetryCount: svc.RetryCount, + Endpoints: []EndpointData{ed}, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + ModuleName: gc.ModuleName, + HasValidator: true, + HasLogger: gc.EnableLogging, + HasSwagger: gc.EnableSwagger, + GlobalConfig: gc, + HandlerName: grp.HandlerName, + } +} + +// extractPathParams (jika belum ada) +func extractPathParams(path string) []string { + var ps []string + for _, part := range strings.Split(path, "/") { + if strings.HasPrefix(part, ":") { + ps = append(ps, strings.TrimPrefix(part, ":")) + } + } + return ps +} + +// Process individual handler file - PERBAIKAN: Hapus referensi grp.HandlerName +func processHandlerFile(basePath, fileName string, dirInfo *DirectoryInfo, service Service, globalConfig GlobalConfig, validation *HandlerValidation) error { + filePath := filepath.Join(filepath.Dir(basePath), fmt.Sprintf("%s.go", fileName)) + fmt.Printf("๐Ÿ“„ Processing file: %s\n", filePath) + + // Check if file exists + fileExists := false + if _, err := os.Stat(filePath); err == nil { + fileExists = true + fmt.Printf(" ๐Ÿ“‹ File exists, checking functions...\n") + } + + var existingFunctions map[string]bool + if fileExists { + // Parse 
existing functions - PERBAIKAN: Hanya gunakan 1 parameter + functions, err := extractExistingFunctions(filePath) + if err != nil { + fmt.Printf(" โš ๏ธ Warning: Could not parse existing file: %v\n", err) + existingFunctions = make(map[string]bool) + } else { + existingFunctions = functions + fmt.Printf(" โœ“ Found %d existing functions\n", len(functions)) + } + } else { + existingFunctions = make(map[string]bool) + } + + // Determine which functions to generate + functionsToGenerate := make([]FunctionInfo, 0) + for _, fnInfo := range dirInfo.Functions { + for _, method := range fnInfo.Methods { + functionName := generateFunctionName(fnInfo.Name, method) + if !existingFunctions[functionName] { + functionsToGenerate = append(functionsToGenerate, FunctionInfo{ + Name: fnInfo.Name, + Methods: []string{method}, + Endpoint: fnInfo.Endpoint, + }) + validation.NewFunctions = append(validation.NewFunctions, functionName) + fmt.Printf(" โœ… Will generate: %s\n", functionName) + } else { + fmt.Printf(" โญ๏ธ Already exists: %s\n", functionName) + } + } + } + + // Generate file if needed + if len(functionsToGenerate) > 0 { + templateData := prepareTemplateData(fileName, service, globalConfig, functionsToGenerate) + + if fileExists { + // Merge with existing file + err := mergeWithExistingFile(filePath, templateData) + if err != nil { + return fmt.Errorf("failed to merge file %s: %w", filePath, err) + } + validation.UpdatedFiles = append(validation.UpdatedFiles, filePath) + fmt.Printf(" ๐Ÿ“ Updated existing file\n") + } else { + // Create new file + err := createNewHandlerFile(filePath, templateData) + if err != nil { + return fmt.Errorf("failed to create file %s: %w", filePath, err) + } + validation.CreatedFiles = append(validation.CreatedFiles, filePath) + fmt.Printf(" ๐Ÿ“ Created new file\n") + } + } else if !fileExists { + fmt.Printf(" โญ๏ธ No functions to generate and file doesn't exist\n") + } else { + fmt.Printf(" โญ๏ธ All functions already exist\n") + } + + 
return nil +} + +// Extract existing functions from Go file +func extractExistingFunctions(filePath string) (map[string]bool, error) { + fileSet := token.NewFileSet() + node, err := parser.ParseFile(fileSet, filePath, nil, parser.ParseComments) + if err != nil { + return nil, err + } + + functions := make(map[string]bool) + ast.Inspect(node, func(n ast.Node) bool { + if fn, ok := n.(*ast.FuncDecl); ok { + if fn.Name != nil { + functions[fn.Name.Name] = true + } + } + return true + }) + return functions, nil +} + +// Generate function name berdasarkan endpoint dan method +func generateFunctionName(endpointName, method string) string { + switch strings.ToUpper(method) { + case "GET": + return fmt.Sprintf("Get%s", strings.Title(endpointName)) + case "POST": + return fmt.Sprintf("Create%s", strings.Title(endpointName)) + case "PUT": + return fmt.Sprintf("Update%s", strings.Title(endpointName)) + case "DELETE": + return fmt.Sprintf("Delete%s", strings.Title(endpointName)) + case "PATCH": + return fmt.Sprintf("Patch%s", strings.Title(endpointName)) + default: + return fmt.Sprintf("%s%s", strings.Title(method), strings.Title(endpointName)) + } +} + +// Prepare template data +func prepareTemplateData(packageName string, service Service, globalConfig GlobalConfig, functions []FunctionInfo) TemplateData { + endpoints := make([]EndpointData, 0) + + for _, fnInfo := range functions { + endpoint := EndpointData{ + Name: strings.Title(fnInfo.Name), + NameLower: strings.ToLower(fnInfo.Name), + NameUpper: strings.ToUpper(fnInfo.Name), + NameCamel: toCamelCase(fnInfo.Name), + ModelPackage: packageName, + Model: fmt.Sprintf("%sRequest", strings.Title(fnInfo.Name)), + ResponseModel: fmt.Sprintf("%sResponse", strings.Title(fnInfo.Name)), + RequestModel: fmt.Sprintf("%sRequest", strings.Title(fnInfo.Name)), + DataModel: fmt.Sprintf("%sData", strings.Title(fnInfo.Name)), + Description: fmt.Sprintf("Handle %s operations", fnInfo.Name), + Tags: []string{strings.Title(packageName)}, + } + 
+ // Set paths dan methods + for _, method := range fnInfo.Methods { + switch strings.ToUpper(method) { + case "GET": + endpoint.HasGet = true + endpoint.GetPath = fmt.Sprintf("/%s/%s", packageName, fnInfo.Name) + case "POST": + endpoint.HasPost = true + endpoint.PostPath = fmt.Sprintf("/%s", packageName) + case "PUT": + endpoint.HasPut = true + endpoint.PutPath = fmt.Sprintf("/%s/%s", packageName, fnInfo.Name) + case "DELETE": + endpoint.HasDelete = true + endpoint.DeletePath = fmt.Sprintf("/%s/%s", packageName, fnInfo.Name) + } + } + + endpoints = append(endpoints, endpoint) + } + + return TemplateData{ + ServiceName: service.Name, + ServiceLower: strings.ToLower(service.Name), + ServiceUpper: strings.ToUpper(service.Name), + Category: service.Category, + Package: packageName, + Description: service.Description, + BaseURL: service.BaseURL, + Timeout: getOrDefault(service.Timeout, 30), + RetryCount: getOrDefault(service.RetryCount, 3), + Endpoints: endpoints, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + ModuleName: globalConfig.ModuleName, + HasValidator: true, + HasLogger: globalConfig.EnableLogging, + HasSwagger: globalConfig.EnableSwagger, + GlobalConfig: globalConfig, + HandlerName: strings.Title(packageName), + } +} + +// Check if handler struct already exists in directory +func shouldGenerateHandlerStruct(baseDir, handlerName string) bool { + structSignature := fmt.Sprintf("type %sHandler struct", handlerName) + + files, err := ioutil.ReadDir(baseDir) + if err != nil { + return true // If directory doesn't exist, generate struct + } + + for _, file := range files { + if file.IsDir() || !strings.HasSuffix(file.Name(), ".go") { + continue + } + + filePath := filepath.Join(baseDir, file.Name()) + content, err := ioutil.ReadFile(filePath) + if err != nil { + continue + } + + if strings.Contains(string(content), structSignature) { + return false // Struct already exists + } + } + + return true // Struct not found, should generate +} + +// Check if 
constructor already exists +func shouldGenerateConstructor(baseDir, handlerName string) bool { + constructorSignature := fmt.Sprintf("func New%sHandler", handlerName) + + files, err := ioutil.ReadDir(baseDir) + if err != nil { + return true + } + + for _, file := range files { + if file.IsDir() || !strings.HasSuffix(file.Name(), ".go") { + continue + } + + filePath := filepath.Join(baseDir, file.Name()) + content, err := ioutil.ReadFile(filePath) + if err != nil { + continue + } + + if strings.Contains(string(content), constructorSignature) { + return false + } + } + + return true +} + +// Generate base handler file (struct + constructor only) +func generateBaseHandlerFile(serviceName string, svc Service, gc GlobalConfig) error { + baseDir := gc.OutputDir + if err := os.MkdirAll(baseDir, 0755); err != nil { + return fmt.Errorf("mkdir %s: %w", baseDir, err) + } + + baseFileName := fmt.Sprintf("%s_base.go", strings.ToLower(serviceName)) + baseFilePath := filepath.Join(baseDir, baseFileName) + + // Skip if base file already exists + if _, err := os.Stat(baseFilePath); err == nil { + fmt.Printf("โญ๏ธ Base handler already exists: %s\n", baseFilePath) + return nil + } + + templateData := TemplateData{ + ServiceName: svc.Name, + ServiceLower: strings.ToLower(svc.Name), + ServiceUpper: strings.ToUpper(svc.Name), + Category: svc.Category, + Package: "handlers", + Description: svc.Description, + BaseURL: svc.BaseURL, + Timeout: svc.Timeout, + RetryCount: svc.RetryCount, + Endpoints: []EndpointData{}, // Empty - base only + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + ModuleName: gc.ModuleName, + HasValidator: true, + HasLogger: gc.EnableLogging, + HasSwagger: gc.EnableSwagger, + GlobalConfig: gc, + } + + return createBaseHandlerFile(baseFilePath, templateData) +} + +// Template untuk base handler (struct + constructor only) +const baseHandlerTemplate = `// Package handlers handles {{.HandlerName}} BPJS services - Base Handler +// Generated on: {{.Timestamp}} 
+package handlers + +import ( + "{{.ModuleName}}/internal/config" + "{{.ModuleName}}/internal/services/bpjs" + "{{.ModuleName}}/pkg/logger" + + "github.com/go-playground/validator/v10" +) + +// {{.HandlerName}}Handler handles {{.HandlerName}} BPJS services +type {{.HandlerName}}Handler struct { + service services.VClaimService + validator *validator.Validate + logger logger.Logger + config config.BpjsConfig +} + +// {{.HandlerName}}HandlerConfig contains configuration for {{.HandlerName}}Handler +type {{.HandlerName}}HandlerConfig struct { + BpjsConfig config.BpjsConfig + Logger logger.Logger + Validator *validator.Validate +} + +// New{{.HandlerName}}Handler creates a new {{.HandlerName}}Handler +func New{{.HandlerName}}Handler(cfg {{.HandlerName}}HandlerConfig) *{{.HandlerName}}Handler { + return &{{.HandlerName}}Handler{ + service: services.NewService(cfg.BpjsConfig), + validator: cfg.Validator, + logger: cfg.Logger, + config: cfg.BpjsConfig, + } +} +` + +// Function to create base handler file +func createBaseHandlerFile(filePath string, templateData TemplateData) error { + if err := os.MkdirAll(filepath.Dir(filePath), 0755); err != nil { + return err + } + + tmpl := template.New("baseHandler").Funcs(template.FuncMap{ + "title": strings.Title, + }) + + tmpl, err := tmpl.Parse(baseHandlerTemplate) + if err != nil { + return err + } + + file, err := os.Create(filePath) + if err != nil { + return err + } + defer file.Close() + + return tmpl.Execute(file, templateData) +} + +// Template untuk handler file lengkap (untuk file baru) +const handlerTemplate = `// Package {{.Package}} handles {{.HandlerName}} BPJS services +// Generated on: {{.Timestamp}} +package handlers + +import ( + "context" + "encoding/json" + "net/http" + "strings" + "time" + + "{{.ModuleName}}/internal/config" + "{{.ModuleName}}/internal/models" + "{{.ModuleName}}/internal/models/vclaim/{{.Package}}" + "{{.ModuleName}}/internal/services/bpjs" + "{{.ModuleName}}/pkg/logger" + + 
"github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + "github.com/google/uuid" +) +// {{.HandlerName}}Handler handles {{.HandlerName}} BPJS services +type {{.HandlerName}}Handler struct { + service services.VClaimService + validator *validator.Validate + logger logger.Logger + config config.BpjsConfig +} +// {{.HandlerName}}HandlerConfig contains configuration for {{.HandlerName}}Handler +type {{.HandlerName}}HandlerConfig struct { + BpjsConfig config.BpjsConfig + Logger logger.Logger + Validator *validator.Validate +} +// New{{.HandlerName}}Handler creates a new {{.HandlerName}}Handler +func New{{.HandlerName}}Handler(cfg {{.HandlerName}}HandlerConfig) *{{.HandlerName}}Handler { + return &{{.HandlerName}}Handler{ + service: services.NewService(cfg.BpjsConfig), + validator: cfg.Validator, + logger: cfg.Logger, + config: cfg.BpjsConfig, + } +} +{{range .Endpoints}} +{{if .HasGet}} +// Get{{.Name}} godoc +// @Summary Get {{.Name}} data +// @Description {{.Description}} +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully retrieved {{.Name}} data" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.GetRoutes}} [get] +func (h *{{$.HandlerName}}Handler) Get{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := 
c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Get{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.GetPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.GetPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.GetRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.GetRawResponse(ctx, "{{.GetPath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to get {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": 
requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasPost}} +// Create{{.Name}} godoc +// @Summary Create new {{.Name}} +// @Description Create new {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} data" +// @Success 201 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully created {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PostRoutes}} [post] +func (h *{{$.HandlerName}}Handler) Create{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Create{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.PostPath}}", + }) + {{end}} + + // Bind 
and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + {{if .PathParams}} + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + endpoint := "{{.PostPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + resp, err := h.service.PostRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PostRawResponse(ctx, "{{.PostPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to create {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + 
c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} already exists or conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully created {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusCreated, response) +} +{{end}} + + +{{if .HasPut}} +// Update{{.Name}} godoc +// @Summary Update existing {{.Name}} +// @Description Update existing {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" 
{{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} update data" +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully updated {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict - update conflict occurred" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PutRoutes}} [put] +func (h *{{$.HandlerName}}Handler) Update{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Update{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.PutPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Bind and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": 
err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.PutPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.PutRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PutRawResponse(ctx, "{{.PutPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to update {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "Update conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = 
&{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully updated {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasDelete}} +// Delete{{.Name}} godoc +// @Summary Delete existing {{.Name}} +// @Description Delete existing {{.Name}} from BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully deleted {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// 
@Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.DeleteRoutes}} [delete] +func (h *{{$.HandlerName}}Handler) Delete{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Delete{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.DeletePath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.DeletePath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.DeleteRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.DeleteRawResponse(ctx, "{{.DeletePath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to delete {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, 
models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } else { + // For delete operations, sometimes there's no data in response + response.Data = nil + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully deleted {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} +{{end}}` + +// Template untuk menambah function saja (untuk file yang sudah ada) +const functionsOnlyTemplate = `{{range .Endpoints}} +{{if .HasGet}} +// Get{{.Name}} godoc +// @Summary Get {{.Name}} data +// @Description {{.Description}} +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param 
{{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully retrieved {{.Name}} data" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.GetRoutes}} [get] +func (h *{{$.HandlerName}}Handler) Get{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Get{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.GetPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.GetPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.GetRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.GetRawResponse(ctx, "{{.GetPath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to get {{.Name}}", 
map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasPost}} +// Create{{.Name}} godoc +// @Summary Create new {{.Name}} +// @Description Create new {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} data" +// @Success 201 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully created {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request" +// 
@Failure 401 {object} models.ErrorResponseBpjs "Unauthorized" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PostRoutes}} [post] +func (h *{{$.HandlerName}}Handler) Create{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Create{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.PostPath}}", + }) + {{end}} + + // Bind and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + {{if .PathParams}} + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing 
required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + endpoint := "{{.PostPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + resp, err := h.service.PostRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PostRawResponse(ctx, "{{.PostPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to create {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} already exists or conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // 
Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully created {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusCreated, response) +} +{{end}} + + +{{if .HasPut}} +// Update{{.Name}} godoc +// @Summary Update existing {{.Name}} +// @Description Update existing {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} update data" +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully updated {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict - update conflict occurred" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PutRoutes}} [put] +func (h *{{$.HandlerName}}Handler) Update{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Update{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + 
"endpoint": "{{.PutPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Bind and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.PutPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.PutRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PutRawResponse(ctx, "{{.PutPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to update {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if 
strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "Update conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully updated {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasDelete}} +// 
Delete{{.Name}} godoc +// @Summary Delete existing {{.Name}} +// @Description Delete existing {{.Name}} from BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully deleted {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.DeleteRoutes}} [delete] +func (h *{{$.HandlerName}}Handler) Delete{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Delete{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.DeletePath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + 
endpoint := "{{.DeletePath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.DeleteRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.DeleteRawResponse(ctx, "{{.DeletePath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to delete {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } else { + // For delete operations, sometimes there's no data in response + 
response.Data = nil + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully deleted {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} +{{end}}` + +// Create new handler file +func createNewHandlerFile(filePath string, templateData TemplateData) error { + // Create directory + if err := os.MkdirAll(filepath.Dir(filePath), 0755); err != nil { + return err + } + + // Parse template + tmpl := template.New("handler").Funcs(template.FuncMap{ + "title": strings.Title, + "index": func(slice []string, i int) string { + if i >= 0 && i < len(slice) { + return slice[i] + } + return "" + }, + }) + + tmpl, err := tmpl.Parse(handlerTemplate) + if err != nil { + return err + } + + // Create file + file, err := os.Create(filePath) + if err != nil { + return err + } + defer file.Close() + + // Execute template + return tmpl.Execute(file, templateData) +} + +// Merge dengan existing file +func mergeWithExistingFile(filePath string, templateData TemplateData) error { + // Read existing content + existingContent, err := ioutil.ReadFile(filePath) + if err != nil { + return err + } + + // Generate new functions + newFunctions, err := generateNewFunctionsOnly(templateData) + if err != nil { + return err + } + + // Merge content + mergedContent := mergeGoFileContent(string(existingContent), newFunctions) + + // Write back + return ioutil.WriteFile(filePath, []byte(mergedContent), 0644) +} + +func generateNewFunctionsOnly(templateData TemplateData) (string, error) { + funcTemplate := ` +{{range .Endpoints}} +{{if .HasGet}} +// Get{{.Name}} godoc +// @Summary Get {{.Name}} data +// @Description {{.Description}} +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range 
.PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully retrieved {{.Name}} data" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.GetRoutes}} [get] +func (h *{{$.HandlerName}}Handler) Get{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Get{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.GetPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.GetPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.GetRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.GetRawResponse(ctx, "{{.GetPath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to get 
{{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasPost}} +// Create{{.Name}} godoc +// @Summary Create new {{.Name}} +// @Description Create new {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} data" +// @Success 201 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully created {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad 
request" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PostRoutes}} [post] +func (h *{{$.HandlerName}}Handler) Create{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Create{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.PostPath}}", + }) + {{end}} + + // Bind and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + {{if .PathParams}} + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: 
"Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + endpoint := "{{.PostPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + resp, err := h.service.PostRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PostRawResponse(ctx, "{{.PostPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to create {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} already exists or conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else 
{ + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully created {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusCreated, response) +} +{{end}} + + +{{if .HasPut}} +// Update{{.Name}} godoc +// @Summary Update existing {{.Name}} +// @Description Update existing {{.Name}} in BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Param request body {{.ModelPackage}}.{{.RequestModel}} true "{{.Name}} update data" +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully updated {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 409 {object} models.ErrorResponseBpjs "Conflict - update conflict occurred" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.PutRoutes}} [put] +func (h *{{$.HandlerName}}Handler) Update{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Update{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + 
"endpoint": "{{.PutPath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Bind and validate request body + var req {{.ModelPackage}}.{{.RequestModel}} + if err := c.ShouldBindJSON(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to bind request body", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Invalid request body: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Validate request structure + if err := h.validator.Struct(&req); err != nil { + {{if $.HasLogger}} + h.logger.Error("Request validation failed", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Validation failed: " + err.Error(), + RequestID: requestID, + }) + return + } + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + endpoint := "{{.PutPath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.PutRawResponse(ctx, endpoint, req) + {{else}} + resp, err := h.service.PutRawResponse(ctx, "{{.PutPath}}", req) + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to update {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if 
strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + if strings.Contains(err.Error(), "409") || strings.Contains(err.Error(), "conflict") { + c.JSON(http.StatusConflict, models.ErrorResponseBpjs{ + Status: "error", + Message: "Update conflict occurred", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully updated {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} + +{{if .HasDelete}} +// 
Delete{{.Name}} godoc +// @Summary Delete existing {{.Name}} +// @Description Delete existing {{.Name}} from BPJS system +// @Tags {{index .Tags 0}} +// @Accept json +// @Produce json {{if .RequireAuth}} +// @Security ApiKeyAuth {{end}} +// @Param X-Request-ID header string false "Request ID for tracking" {{range .PathParams}} +// @Param {{.}} path string true "{{.}}" example("example_value") {{end}} +// @Success 200 {object} {{.ModelPackage}}.{{.ResponseModel}} "Successfully deleted {{.Name}}" +// @Failure 400 {object} models.ErrorResponseBpjs "Bad request - invalid parameters" +// @Failure 401 {object} models.ErrorResponseBpjs "Unauthorized - invalid API credentials" +// @Failure 404 {object} models.ErrorResponseBpjs "Not found - {{.Name}} not found" +// @Failure 500 {object} models.ErrorResponseBpjs "Internal server error" +// @Router {{.DeleteRoutes}} [delete] +func (h *{{$.HandlerName}}Handler) Delete{{.Name}}(c *gin.Context) { + ctx, cancel := context.WithTimeout(c.Request.Context(), {{$.Timeout}}*time.Second) + defer cancel() + + // Generate request ID if not present + requestID := c.GetHeader("X-Request-ID") + if requestID == "" { + requestID = uuid.New().String() + c.Header("X-Request-ID", requestID) + } + + {{if $.HasLogger}} + h.logger.Info("Processing Delete{{.Name}} request", map[string]interface{}{ + "request_id": requestID, + "endpoint": "{{.DeletePath}}", + {{range .PathParams}} + "{{.}}": c.Param("{{.}}"), + {{end}} + }) + {{end}} + + // Extract path parameters + {{range .PathParams}} + {{.}} := c.Param("{{.}}") + if {{.}} == "" { + {{if $.HasLogger}} + h.logger.Error("Missing required parameter {{.}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + c.JSON(http.StatusBadRequest, models.ErrorResponseBpjs{ + Status: "error", + Message: "Missing required parameter {{.}}", + RequestID: requestID, + }) + return + } + {{end}} + + // Call service method + var response {{.ModelPackage}}.{{.ResponseModel}} + {{if .PathParams}} + 
endpoint := "{{.DeletePath}}" + {{range .PathParams}} + endpoint = strings.Replace(endpoint, ":{{.}}", {{.}}, 1) + {{end}} + resp, err := h.service.DeleteRawResponse(ctx, endpoint) + {{else}} + resp, err := h.service.DeleteRawResponse(ctx, "{{.DeletePath}}") + {{end}} + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to delete {{.Name}}", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + + // Handle specific BPJS errors + if strings.Contains(err.Error(), "404") || strings.Contains(err.Error(), "not found") { + c.JSON(http.StatusNotFound, models.ErrorResponseBpjs{ + Status: "error", + Message: "{{.Name}} not found", + RequestID: requestID, + }) + return + } + + c.JSON(http.StatusInternalServerError, models.ErrorResponseBpjs{ + Status: "error", + Message: "Internal server error", + RequestID: requestID, + }) + return + } + + // Map the raw response + response.MetaData = resp.MetaData + if resp.Response != nil { + response.Data = &{{.ModelPackage}}.{{.DataModel}}{} + if respStr, ok := resp.Response.(string); ok { + // Decrypt the response string + consID, secretKey, _, tstamp, _ := h.config.SetHeader() + decryptedResp, err := services.ResponseVclaim(respStr, consID+secretKey+tstamp) + if err != nil { + {{if $.HasLogger}} + h.logger.Error("Failed to decrypt response", map[string]interface{}{ + "error": err.Error(), + "request_id": requestID, + }) + {{end}} + } else { + json.Unmarshal([]byte(decryptedResp), response.Data) + } + } else if respMap, ok := resp.Response.(map[string]interface{}); ok { + // Response is already unmarshaled JSON + if dataMap, exists := respMap["{{.ModelPackage}}"]; exists { + dataBytes, _ := json.Marshal(dataMap) + json.Unmarshal(dataBytes, response.Data) + } else { + // Try to unmarshal the whole response + respBytes, _ := json.Marshal(resp.Response) + json.Unmarshal(respBytes, response.Data) + } + } + } else { + // For delete operations, sometimes there's no data in response + 
response.Data = nil + } + + // Ensure response has proper fields + response.Status = "success" + response.RequestID = requestID + + {{if $.HasLogger}} + h.logger.Info("Successfully deleted {{.Name}}", map[string]interface{}{ + "request_id": requestID, + }) + {{end}} + + c.JSON(http.StatusOK, response) +} +{{end}} +{{end}}` + + tmpl := template.New("functions") + tmpl, err := tmpl.Parse(funcTemplate) + if err != nil { + return "", err + } + + var result strings.Builder + err = tmpl.Execute(&result, templateData) + if err != nil { + return "", err + } + + // Remove duplicate function definitions by simple regex grouping + // This is a simple approach to avoid duplicate functions in merged content + funcRegex := regexp.MustCompile(`(?s)(func \(h \*\w+Handler\) Get\w+\(c \*gin.Context\) \{.*?\})`) + matches := funcRegex.FindAllString(result.String(), -1) + uniqueFuncs := make(map[string]bool) + var uniqueResult strings.Builder + for _, m := range matches { + if !uniqueFuncs[m] { + uniqueFuncs[m] = true + uniqueResult.WriteString(m) + uniqueResult.WriteString("\n\n") + } + } + + // If no matches found, return original result + if uniqueResult.Len() == 0 { + return result.String(), nil + } + + return uniqueResult.String(), nil +} + +func mergeGoFileContent(existingContent, newFunctions string) string { + // Find last closing brace + re := regexp.MustCompile(`}\s*$`) + lastBraceIndex := re.FindStringIndex(existingContent) + + if lastBraceIndex == nil { + return existingContent + "\n" + newFunctions + } + + before := existingContent[:lastBraceIndex[0]] + after := existingContent[lastBraceIndex[0]:] + + return before + "\n" + newFunctions + "\n" + after +} + +// Main function +func main() { + if len(os.Args) < 2 { + printUsage() + os.Exit(1) + } + + configFile := os.Args[1] + var targetService string + if len(os.Args) > 2 { + targetService = os.Args[2] + } + + config, err := loadConfig(configFile) + if err != nil { + fmt.Printf("Error loading config: %v\n", err) + 
os.Exit(1) + } + + fmt.Println("๐Ÿš€ Starting BPJS Dynamic Handler Generation with Validation...") + fmt.Printf("๐Ÿ“ Config file: %s\n", configFile) + if targetService != "" { + fmt.Printf("๐ŸŽฏ Target service: %s\n", targetService) + } + + generated := 0 + errors := 0 + + for serviceName, service := range config.Services { + if targetService != "" && serviceName != targetService { + continue + } + + err := generateHandlerWithValidation(serviceName, service, config.Global) + if err != nil { + fmt.Printf("โŒ Error generating handler for %s: %v\n", serviceName, err) + errors++ + continue + } + + generated++ + } + + // Summary + fmt.Println("\n๐Ÿ“Š Generation Summary:") + fmt.Printf("โœ… Successfully processed: %d services\n", generated) + if errors > 0 { + fmt.Printf("โŒ Failed: %d services\n", errors) + } + + if generated > 0 { + fmt.Println("๐ŸŽ‰ Generation completed successfully!") + } +} + +// Helper functions +func loadConfig(filename string) (*ServiceConfig, error) { + data, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + var config ServiceConfig + err = yaml.Unmarshal(data, &config) + if err != nil { + return nil, err + } + + // Set defaults + if config.Global.ModuleName == "" { + config.Global.ModuleName = "api-service" + } + if config.Global.OutputDir == "" { + config.Global.OutputDir = "internal/handlers" + } + + return &config, nil +} + +func getOrDefault(value, defaultValue int) int { + if value == 0 { + return defaultValue + } + return value +} + +func printUsage() { + fmt.Println("BPJS Dynamic Handler Generator with Function Validation") + fmt.Println() + fmt.Println("Usage:") + fmt.Println(" go run generate-handler.go [service-name]") + fmt.Println() + fmt.Println("Examples:") + fmt.Println(" go run generate-handler.go services-config-bpjs.yaml") + fmt.Println(" go run generate-handler.go services-config-bpjs.yaml vclaim") +} + +// Generate routes file untuk service +func generateRoutes(serviceName string, svc Service, 
gc GlobalConfig) error { + routesFilePath := "internal/routes/v1/routes.go" + routesContent, err := ioutil.ReadFile(routesFilePath) + if err != nil { + return fmt.Errorf("failed to read routes file: %w", err) + } + + routesContentStr := string(routesContent) + + // Check if routes are already registered + if strings.Contains(routesContentStr, fmt.Sprintf("Register%sRoutes", svc.Name)) { + fmt.Printf("โš ๏ธ Routes for %s already registered in main routes file\n", svc.Name) + return nil + } + + var imports []string + var allRoutes []string + + // โœ… PERBAIKAN: Gunakan groupName dengan benar + // โœ… VALIDASI: Track handler folder yang sudah diimport + processedFolders := make(map[string]bool) + for groupName, grp := range svc.Endpoints { + // Import berdasarkan handler folder + // imports = append(imports, fmt.Sprintf("\t%sHandlers \"%s/internal/handlers/%s\"", + // grp.HandlerFolder, gc.ModuleName, grp.HandlerFolder)) + if !processedFolders[grp.HandlerFolder] { + importLine := fmt.Sprintf("\t%sHandlers \"%s/internal/handlers/%s\"", + grp.HandlerFolder, gc.ModuleName, grp.HandlerFolder) + imports = append(imports, importLine) + processedFolders[grp.HandlerFolder] = true + fmt.Printf("โœ… Added import: %sHandlers\n", grp.HandlerFolder) + } else { + fmt.Printf("โš ๏ธ Skipped duplicate import for folder: %s\n", grp.HandlerFolder) + } + + var routesCode strings.Builder + + // Gunakan groupName untuk comment dan identifier + routesCode.WriteString(fmt.Sprintf("\n\t// %s (%s) routes\n", grp.Description, groupName)) + + // Handler instantiation menggunakan HandlerName dari config + routesCode.WriteString(fmt.Sprintf("\t%sHandler := %sHandlers.New%sHandler(%sHandlers.%sHandlerConfig{\n", + strings.ToLower(grp.HandlerName), + grp.HandlerFolder, + grp.HandlerName, + grp.HandlerFolder, + grp.HandlerName)) + + routesCode.WriteString("\t\tBpjsConfig: cfg.Bpjs,\n") + routesCode.WriteString("\t\tLogger: *logger.Default(),\n") + routesCode.WriteString("\t\tValidator: 
validator.New(),\n") + routesCode.WriteString("\t})\n") + + // โœ… GUNAKAN groupName untuk route group path + routesCode.WriteString(fmt.Sprintf("\t%sGroup := v1.Group(\"/%s\")\n", + strings.ToLower(grp.HandlerName), groupName)) // โ† Gunakan groupName di sini + + // Process functions + for fname, fcfg := range grp.Functions { + td := processFunctionData(svc, grp, fname, fcfg, gc) + + for _, endpoint := range td.Endpoints { + handlerVar := strings.ToLower(grp.HandlerName) + "Handler" + groupVar := strings.ToLower(grp.HandlerName) + "Group" + + // โœ… MODIFIKASI: Loop through methods dan gunakan specific routes + for _, method := range fcfg.Methods { + var cleanPath string + + // โœ… Pilih path berdasarkan method + switch strings.ToUpper(method) { + case "GET": + cleanPath = fcfg.GetRoutes + if cleanPath == "" { + cleanPath = fcfg.GetPath // fallback ke get_path + } + case "POST": + cleanPath = fcfg.PostRoutes + if cleanPath == "" { + cleanPath = fcfg.PostPath // fallback ke post_path + } + case "PUT": + cleanPath = fcfg.PutRoutes + if cleanPath == "" { + cleanPath = fcfg.PutPath // fallback ke put_path + } + case "DELETE": + cleanPath = fcfg.DeleteRoutes + if cleanPath == "" { + cleanPath = fcfg.DeletePath // fallback ke delete_path + } + default: + fmt.Printf("โš ๏ธ Unsupported HTTP method: %s for function %s\n", method, fname) + continue + } + + // โœ… Final fallback ke path jika specific route kosong + if cleanPath == "" { + cleanPath = fcfg.Path + } + + // โœ… Bersihkan path - hapus prefix groupName jika ada + if strings.HasPrefix(cleanPath, "/"+groupName) { + cleanPath = strings.TrimPrefix(cleanPath, "/"+groupName) + } + if cleanPath == "" { + cleanPath = "/" + } + + // โœ… Generate route berdasarkan method + switch strings.ToUpper(method) { + case "GET": + routesCode.WriteString(fmt.Sprintf("\t%s.GET(\"%s\", %s.Get%s)\n", + groupVar, cleanPath, handlerVar, endpoint.Name)) + case "POST": + routesCode.WriteString(fmt.Sprintf("\t%s.POST(\"%s\", 
%s.Create%s)\n", + groupVar, cleanPath, handlerVar, endpoint.Name)) + case "PUT": + routesCode.WriteString(fmt.Sprintf("\t%s.PUT(\"%s\", %s.Update%s)\n", + groupVar, cleanPath, handlerVar, endpoint.Name)) + case "DELETE": + routesCode.WriteString(fmt.Sprintf("\t%s.DELETE(\"%s\", %s.Delete%s)\n", + groupVar, cleanPath, handlerVar, endpoint.Name)) + } + } + } + } + + allRoutes = append(allRoutes, routesCode.String()) + } + + // โœ… PERBAIKAN: Insert imports setelah "api-service/internal/database" + if len(imports) > 0 { + // โœ… PERBAIKAN: Hilangkan newline di awal, langsung import lines saja + importSection := strings.Join(imports, "\n") + "\n" + + // โœ… PERBAIKAN: Cari posisi setelah "api-service/internal/database" + databaseImportMarker := fmt.Sprintf("\"%s/internal/database\"", gc.ModuleName) + if strings.Contains(routesContentStr, databaseImportMarker) { + // Temukan posisi marker + markerPos := strings.Index(routesContentStr, databaseImportMarker) + // Temukan akhir baris dari marker + endOfLinePos := strings.Index(routesContentStr[markerPos:], "\n") + markerPos + // Insert import section setelah baris marker + routesContentStr = routesContentStr[:endOfLinePos+1] + importSection + routesContentStr[endOfLinePos+1:] + fmt.Printf("โœ… Inserted imports after database import\n") + } else { + // Fallback: Insert setelah "import (" jika marker tidak ditemukan + importMarker := "import (" + if strings.Contains(routesContentStr, importMarker) { + importIndex := strings.Index(routesContentStr, importMarker) + len(importMarker) + routesContentStr = routesContentStr[:importIndex] + "\n" + importSection + routesContentStr[importIndex:] + fmt.Printf("โš ๏ธ Database import not found, inserted after import (\n") + } else { + fmt.Printf("โš ๏ธ Could not find import section to insert imports\n") + } + } + } + + // Find and insert routes + publishedRoutesMarker := "// ============= PUBLISHED ROUTES ===============================================" + if 
!strings.Contains(routesContentStr, publishedRoutesMarker) { + return fmt.Errorf("PUBLISHED ROUTES marker not found in routes.go") + } + + insertionPoint := strings.Index(routesContentStr, publishedRoutesMarker) + len(publishedRoutesMarker) + newRoutesContent := routesContentStr[:insertionPoint] + "\n" + strings.Join(allRoutes, "\n") + "\n" + routesContentStr[insertionPoint:] + + err = ioutil.WriteFile(routesFilePath, []byte(newRoutesContent), 0644) + if err != nil { + return fmt.Errorf("failed to write updated routes file: %w", err) + } + + fmt.Printf("โœ… Updated main routes file with %s routes\n", svc.Name) + return nil +} diff --git a/tools/bpjs/services-config-bpjs.yaml b/tools/bpjs/services-config-bpjs.yaml new file mode 100644 index 0000000..4dd95e9 --- /dev/null +++ b/tools/bpjs/services-config-bpjs.yaml @@ -0,0 +1,160 @@ +global: + module_name: "api-service" + output_dir: "internal/handlers" + enable_swagger: true + enable_logging: true + +services: + vclaim: + name: "VClaim" + category: "vclaim" + package: "vclaim" + description: "BPJS VClaim service for eligibility and SEP management" + base_url: "https://apijkn.bpjs-kesehatan.go.id/vclaim-rest" + timeout: 30 + retry_count: 3 + + endpoints: + peserta: + description: "Participant eligibility information" + handler_folder: "peserta" + handler_file: "peserta.go" + handler_name: "Peserta" + functions: + bynokartu: + methods: ["GET"] + path: "/peserta/:nokartu" + get_routes: "/nokartu/:nokartu" + # post_routes: "/Peserta/nokartu/:nokartu" + # put_routes: "/Peserta/nokartu/:nokartu" + # delete_routes: "/Peserta/nokartu/:nokartu" + get_path: "/Peserta/nokartu/:nokartu/tglSEP/:tglSEP" + # post_path: "/peserta" + # put_path: "/peserta/:nokartu" + # delete_path: "/peserta/:nokartu" + model: "PesertaRequest" + response_model: "PesertaResponse" + request_model: "RujukanRequest" + description: "Get participant eligibility information by card number" + summary: "Get Participant Info by No Kartu" + tags: ["Peserta"] + 
require_auth: true + cache_enabled: true + cache_ttl: 300 + + bynik: + methods: ["GET"] + path: "/peserta/nik/:nik" + get_routes: "/nik/:nik" + # post_routes: "/Peserta/nik/:nik" + # put_routes: "/Peserta/nik/:nik" + # delete_routes: "/Peserta/nik/:nik" + get_path: "/Peserta/nik/:nik/tglSEP/:tglSEP" + # post_path: "/peserta" + # put_path: "/peserta/nik/:nik" + # delete_path: "/peserta/nik/:nik" + model: "PesertaRequest" + response_model: "PesertaResponse" + request_model: "PesertaRequest" + description: "Get participant eligibility information by NIK" + summary: "Get Participant Info by NIK" + tags: ["Peserta"] + require_auth: true + cache_enabled: true + cache_ttl: 300 + + rujukan: + description: "Rujukan management endpoints" + handler_folder: "rujukan" + handler_file: "rujukan.go" + handler_name: "Rujukan" + functions: + rujukan: + methods: ["POST", "PUT", "DELETE"] + path: "/Rujukan" + # get_routes: "/Rujukan/:norujukan" + post_routes: "/Rujukan/:norujukan" + put_routes: "/Rujukan/:norujukan" + delete_routes: "/Rujukan/:norujukan" + # get_path: "/Rujukan/:norujukan" + post_path: "/Rujukan" + put_path: "/Rujukan/:norujukan" + delete_path: "/Rujukan/:norujukan" + model: "RujukanRequest" + response_model: "RujukanResponse" + request_model: "RujukanRequest" + description: "Manage rujukan" + summary: "Rujukan Management" + tags: ["Rujukan"] + require_auth: true + cache_enabled: true + cache_ttl: 180 + rujukanbalik: + methods: ["POST", "PUT", "DELETE"] + path: "/Rujukanbalik" + # get_routes: "/Rujukanbalik/:norujukan" + post_routes: "/Rujukanbalik/:norujukan" + put_routes: "/Rujukanbalik/:norujukan" + delete_routes: "/Rujukanbalik/:norujukan" + # get_path: "/Rujukanbalik/:norujukan" + post_path: "/Rujukanbalik" + put_path: "/Rujukanbalik/:norujukan" + delete_path: "/Rujukanbalik/:norujukan" + model: "RujukanRequest" + response_model: "RujukanResponse" + request_model: "RujukanRequest" + description: "Manage rujukan" + summary: "Rujukan Management" + tags: ["Rujukan"] 
+ require_auth: true + cache_enabled: true + cache_ttl: 180 + + search: + description: "Search for rujukan endpoints" + handler_folder: "rujukan" + handler_file: "search.go" + handler_name: "Search" + functions: + bynorujukan: + methods: ["GET"] + path: "/Rujukan/:norujukan" + get_routes: "/bynorujukan/:norujukan" + # post_routes: "/bynorujukan/:norujukan" + # put_routes: "/bynorujukan/:norujukan" + # delete_routes: "/bynorujukan/:norujukan" + get_path: "/Rujukan/:norujukan" + # post_path: "/Rujukan" + # put_path: "/Rujukan/:norujukan" + # delete_path: "/Rujukan/:norujukan" + model: "RujukanRequest" + response_model: "RujukanResponse" + request_model: "RujukanRequest" + description: "Get rujukan by nomor rujukan" + summary: "Rujukan Management" + tags: ["Rujukan"] + require_auth: true + cache_enabled: true + cache_ttl: 300 + + bynokartu: + methods: ["GET"] + path: "/Rujukan/:nokartu" + get_routes: "/bynokartu/:nokartu" + # post_routes: "/bynokartu/:nokartu" + # put_routes: "/bynokartu/:nokartu" + # delete_routes: "/bynokartu/:nokartu" + get_path: "/Rujukan/:nokartu" + # post_path: "/Rujukan" + # put_path: "/Rujukan/:nokartu" + # delete_path: "/Rujukan/:nokartu" + model: "RujukanRequest" + response_model: "RujukanResponse" + request_model: "RujukanRequest" + description: "Get rujukan by card number" + summary: "Rujukan Management" + tags: ["Rujukan"] + require_auth: true + cache_enabled: true + cache_ttl: 300 + diff --git a/tools/general/generate-handler.go b/tools/general/generate-handler.go new file mode 100644 index 0000000..bddbdcb --- /dev/null +++ b/tools/general/generate-handler.go @@ -0,0 +1,1740 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "time" +) + +// HandlerData contains template data for handler generation +type HandlerData struct { + Name string + NameLower string + NamePlural string + Category string // Untuk backward compatibility (bagian pertama) + DirPath string // Path direktori lengkap + ModuleName string + 
TableName string + HasGet bool + HasPost bool + HasPut bool + HasDelete bool + HasStats bool + HasDynamic bool + HasSearch bool + HasFilter bool + HasPagination bool + Timestamp string +} + +type PathInfo struct { + Category string + EntityName string + DirPath string + FilePath string +} + +// parseEntityPath - Logic parsing yang diperbaiki +func parseEntityPath(entityPath string) (*PathInfo, error) { + if strings.TrimSpace(entityPath) == "" { + return nil, fmt.Errorf("entity path cannot be empty") + } + var pathInfo PathInfo + parts := strings.Split(entityPath, "/") + // Validasi minimal 1 bagian (file saja) dan maksimal 4 + if len(parts) < 1 || len(parts) > 4 { + return nil, fmt.Errorf("invalid path format: use up to 4 levels like 'level1/level2/level3/entity'") + } + // Validasi bagian kosong + for i, part := range parts { + if strings.TrimSpace(part) == "" { + return nil, fmt.Errorf("empty path segment at position %d", i+1) + } + } + + pathInfo.EntityName = parts[len(parts)-1] + if len(parts) > 1 { + pathInfo.Category = parts[len(parts)-2] + pathInfo.DirPath = strings.Join(parts[:len(parts)-1], "/") + pathInfo.FilePath = pathInfo.DirPath + "/" + strings.ToLower(pathInfo.EntityName) + ".go" + } else { + pathInfo.Category = "models" + pathInfo.DirPath = "" + pathInfo.FilePath = strings.ToLower(pathInfo.EntityName) + ".go" + } + return &pathInfo, nil +} + +// validateMethods - Validasi method yang diinput +func validateMethods(methods []string) error { + validMethods := map[string]bool{ + "get": true, "post": true, "put": true, "delete": true, + "stats": true, "dynamic": true, "search": true, + } + + for _, method := range methods { + if !validMethods[strings.ToLower(method)] { + return fmt.Errorf("invalid method: %s. 
Valid methods: get, post, put, delete, stats, dynamic, search", method)
		}
	}
	return nil
}

// generateTableName derives the backing table name from the full entity path:
// each "/" in the directory path is flattened to "_" and the result is
// prefixed with "data_", e.g. "retribusi/tarif" -> "data_retribusi_tarif".
func generateTableName(pathInfo *PathInfo) string {
	entityLower := strings.ToLower(pathInfo.EntityName)

	if pathInfo.DirPath != "" {
		// Replace "/" with "_" so nested paths map to a flat table name.
		pathForTable := strings.ReplaceAll(pathInfo.DirPath, "/", "_")
		return "data_" + pathForTable + "_" + entityLower
	}
	return "data_" + entityLower
}

// createDirectories creates the handler and model directories matching the
// entity's directory path and returns them as (handlerDir, modelDir).
// MkdirAll is a no-op for directories that already exist.
func createDirectories(pathInfo *PathInfo) (string, string, error) {
	handlerDir := filepath.Join("internal", "handlers")
	modelDir := filepath.Join("internal", "models")
	if pathInfo.DirPath != "" {
		handlerDir = filepath.Join(handlerDir, pathInfo.DirPath)
		modelDir = filepath.Join(modelDir, pathInfo.DirPath)
	}

	// Create directories
	for _, dir := range []string{handlerDir, modelDir} {
		if err := os.MkdirAll(dir, 0755); err != nil {
			// %w keeps the underlying os error inspectable via errors.Is/As.
			return "", "", fmt.Errorf("failed to create directory %s: %w", dir, err)
		}
	}

	return handlerDir, modelDir, nil
}

// setMethods flips the feature flags on data that correspond to the requested
// method names (case-insensitive). Unknown names are silently ignored because
// validateMethods has already rejected them. "stats" is implied by "get".
func setMethods(data *HandlerData, methods []string) {
	methodMap := map[string]*bool{
		"get":     &data.HasGet,
		"post":    &data.HasPost,
		"put":     &data.HasPut,
		"delete":  &data.HasDelete,
		"stats":   &data.HasStats,
		"dynamic": &data.HasDynamic,
		"search":  &data.HasSearch,
	}

	for _, method := range methods {
		if flag, exists := methodMap[strings.ToLower(method)]; exists {
			*flag = true
		}
	}

	// Always add stats if we have get
	if data.HasGet {
		data.HasStats = true
	}
}

func main() {
	// Validate CLI arguments before doing any work.
	if len(os.Args) < 2 {
		fmt.Println("Usage: go run generate-handler.go [path/]entity [methods]")
		fmt.Println("Examples:")
		fmt.Println(" go run generate-handler.go product get post put delete")
+ fmt.Println(" go run generate-handler.go retribusi/tarif get post put delete dynamic search") + fmt.Println(" go run generate-handler.go product/category/subcategory/item get post") + fmt.Println("\nSupported methods: get, post, put, delete, stats, dynamic, search") + os.Exit(1) + } + + // Parse entity path + entityPath := strings.TrimSpace(os.Args[1]) + pathInfo, err := parseEntityPath(entityPath) + if err != nil { + fmt.Printf("โŒ Error parsing path: %v\n", err) + os.Exit(1) + } + + // Parse methods + var methods []string + if len(os.Args) > 2 { + methods = os.Args[2:] + } else { + // Default methods with advanced features + methods = []string{"get", "post", "put", "delete", "dynamic", "search"} + } + + // Validate methods + if err := validateMethods(methods); err != nil { + fmt.Printf("โŒ %v\n", err) + os.Exit(1) + } + + // Format names + entityName := strings.Title(pathInfo.EntityName) // PascalCase entity name + entityLower := strings.ToLower(pathInfo.EntityName) + entityPlural := entityLower + "s" + + // Generate table name + tableName := generateTableName(pathInfo) + + // Create HandlerData + data := HandlerData{ + Name: entityName, + NameLower: entityLower, + NamePlural: entityPlural, + Category: pathInfo.Category, + DirPath: pathInfo.DirPath, + ModuleName: "api-service", + TableName: tableName, + HasPagination: true, + HasFilter: true, + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + } + + // Set methods + setMethods(&data, methods) + + // Create directories + handlerDir, modelDir, err := createDirectories(pathInfo) + if err != nil { + fmt.Printf("โŒ Error creating directories: %v\n", err) + os.Exit(1) + } + + // Generate files + generateHandlerFile(data, handlerDir) + generateModelFile(data, modelDir) + updateRoutesFile(data) + + // Success output + fmt.Printf("โœ… Successfully generated handler: %s\n", entityName) + if pathInfo.Category != "" { + fmt.Printf("๐Ÿ“ Category: %s\n", pathInfo.Category) + } + if pathInfo.DirPath != "" { + 
fmt.Printf("๐Ÿ“‚ Path: %s\n", pathInfo.DirPath) + } + fmt.Printf("๐Ÿ“„ Handler: %s\n", filepath.Join(handlerDir, entityLower+".go")) + fmt.Printf("๐Ÿ“„ Model: %s\n", filepath.Join(modelDir, entityLower+".go")) + fmt.Printf("๐Ÿ—„๏ธ Table: %s\n", tableName) + fmt.Printf("๐Ÿ› ๏ธ Methods: %s\n", strings.Join(methods, ", ")) +} + +// ================= HANDLER GENERATION ===================== +func generateHandlerFile(data HandlerData, handlerDir string) { + // var modelsImportPath string + // if data.Category != "" { + // modelsImportPath = data.ModuleName + "/internal/models/" + data.Category + // } else { + // modelsImportPath = data.ModuleName + "/internal/models" + // } + + // pakai strings.Builder biar lebih clean + var handlerContent strings.Builder + + // Header + handlerContent.WriteString("package handlers\n\n") + handlerContent.WriteString("import (\n") + handlerContent.WriteString(` "` + data.ModuleName + `/internal/config"` + "\n") + handlerContent.WriteString(` "` + data.ModuleName + `/internal/database"` + "\n") + handlerContent.WriteString(` models "` + data.ModuleName + `/internal/models"` + "\n") + if data.Category != "models" { + handlerContent.WriteString(` "` + data.ModuleName + `/internal/models/` + data.Category + `"` + "\n") + } + + // Conditional imports + if data.HasDynamic || data.HasSearch { + handlerContent.WriteString(` utils "` + data.ModuleName + `/internal/utils/filters"` + "\n") + } + + handlerContent.WriteString(` "` + data.ModuleName + `/internal/utils/validation"` + "\n") + handlerContent.WriteString(` "context"` + "\n") + handlerContent.WriteString(` "database/sql"` + "\n") + handlerContent.WriteString(` "fmt"` + "\n") + handlerContent.WriteString(` "log"` + "\n") + handlerContent.WriteString(` "net/http"` + "\n") + handlerContent.WriteString(` "strconv"` + "\n") + handlerContent.WriteString(` "strings"` + "\n") + handlerContent.WriteString(` "sync"` + "\n") + handlerContent.WriteString(` "time"` + "\n\n") + 
handlerContent.WriteString(` "github.com/gin-gonic/gin"` + "\n") + handlerContent.WriteString(` "github.com/go-playground/validator/v10"` + "\n") + handlerContent.WriteString(` "github.com/google/uuid"` + "\n") + handlerContent.WriteString(")\n\n") + + // Vars + handlerContent.WriteString("var (\n") + handlerContent.WriteString(" " + data.NameLower + "db database.Service\n") + handlerContent.WriteString(" " + data.NameLower + "once sync.Once\n") + handlerContent.WriteString(" " + data.NameLower + "validate *validator.Validate\n") + handlerContent.WriteString(")\n\n") + + // init func + handlerContent.WriteString("// Initialize the database connection and validator\n") + handlerContent.WriteString("func init() {\n") + handlerContent.WriteString(" " + data.NameLower + "once.Do(func() {\n") + handlerContent.WriteString(" " + data.NameLower + "db = database.New(config.LoadConfig())\n") + handlerContent.WriteString(" " + data.NameLower + "validate = validator.New()\n") + handlerContent.WriteString(" " + data.NameLower + "validate.RegisterValidation(\"" + data.NameLower + "_status\", validate" + data.Name + "Status)\n") + handlerContent.WriteString(" if " + data.NameLower + "db == nil {\n") + handlerContent.WriteString(" log.Fatal(\"Failed to initialize database connection\")\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString(" })\n") + handlerContent.WriteString("}\n\n") + + // Custom validation + handlerContent.WriteString("// Custom validation for " + data.NameLower + " status\n") + handlerContent.WriteString("func validate" + data.Name + "Status(fl validator.FieldLevel) bool {\n") + handlerContent.WriteString(" return models.IsValidStatus(fl.Field().String())\n") + handlerContent.WriteString("}\n\n") + + // Handler struct + handlerContent.WriteString("// " + data.Name + "Handler handles " + data.NameLower + " services\n") + handlerContent.WriteString("type " + data.Name + "Handler struct {\n") + handlerContent.WriteString(" db database.Service\n") 
+ handlerContent.WriteString("}\n\n") + + // Constructor + handlerContent.WriteString("// New" + data.Name + "Handler creates a new " + data.Name + "Handler\n") + handlerContent.WriteString("func New" + data.Name + "Handler() *" + data.Name + "Handler {\n") + handlerContent.WriteString(" return &" + data.Name + "Handler{\n") + handlerContent.WriteString(" db: " + data.NameLower + "db,\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString("}\n") + + // Add optional methods + if data.HasGet { + handlerContent.WriteString(generateGetMethods(data)) + } + if data.HasDynamic { + handlerContent.WriteString(generateDynamicMethod(data)) + } + if data.HasSearch { + handlerContent.WriteString(generateSearchMethod(data)) + } + if data.HasPost { + handlerContent.WriteString(generateCreateMethod(data)) + } + if data.HasPut { + handlerContent.WriteString(generateUpdateMethod(data)) + } + if data.HasDelete { + handlerContent.WriteString(generateDeleteMethod(data)) + } + if data.HasStats { + handlerContent.WriteString(generateStatsMethod(data)) + } + + // Add helper methods + handlerContent.WriteString(generateHelperMethods(data)) + + // Write into file + writeFile(filepath.Join(handlerDir, data.NameLower+".go"), handlerContent.String()) +} + +func generateGetMethods(data HandlerData) string { + return ` + +// Get` + data.Name + ` godoc +// @Summary Get ` + data.NameLower + ` with pagination and optional aggregation +// @Description Returns a paginated list of ` + data.NamePlural + ` with optional summary statistics +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param limit query int false "Limit (max 100)" default(10) +// @Param offset query int false "Offset" default(0) +// @Param include_summary query bool false "Include aggregation summary" default(false) +// @Param status query string false "Filter by status" +// @Param search query string false "Search in multiple fields" +// @Success 200 {object} ` + data.Category + `.` + data.Name + 
`GetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + ` [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `(c *gin.Context) { + // Parse pagination parameters + limit, offset, err := h.parsePaginationParams(c) + if err != nil { + h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest) + return + } + + // Parse filter parameters + filter := h.parseFilterParams(c) + includeAggregation := c.Query("include_summary") == "true" + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute concurrent operations + var ( + items []` + data.Category + `.` + data.Name + ` + total int + aggregateData *models.AggregateData + wg sync.WaitGroup + errChan = make(chan error, 3) + mu sync.Mutex + ) + + // Fetch total count + wg.Add(1) + go func() { + defer wg.Done() + if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil { + mu.Lock() + errChan <- fmt.Errorf("failed to get total count: %w", err) + mu.Unlock() + } + }() + + // Fetch main data + wg.Add(1) + go func() { + defer wg.Done() + result, err := h.fetch` + data.Name + `s(ctx, dbConn, filter, limit, offset) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to fetch data: %w", err) + } else { + items = result + } + mu.Unlock() + }() + + // Fetch aggregation data if requested + if includeAggregation { + wg.Add(1) + go func() { + defer wg.Done() + result, err := h.getAggregateData(ctx, dbConn, filter) + mu.Lock() + if err != nil { + errChan <- fmt.Errorf("failed to get aggregate data: %w", err) + } else { + aggregateData = result + } + 
mu.Unlock() + }() + } + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(limit, offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: "Data ` + data.Category + ` berhasil diambil", + Data: items, + Meta: meta, + } + + if includeAggregation && aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) +} + +// Get` + data.Name + `ByID godoc +// @Summary Get ` + data.Name + ` by ID +// @Description Returns a single ` + data.NameLower + ` by ID +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `GetByIDResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `ByID(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + item, err := h.get` + data.Name + `ByID(ctx, dbConn, id) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, 
"Failed to get ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `GetByIDResponse{ + Message: "` + data.Category + ` details retrieved successfully", + Data: item, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateDynamicMethod(data HandlerData) string { + return ` + +// Get` + data.Name + `Dynamic godoc +// @Summary Get ` + data.NameLower + ` with dynamic filtering +// @Description Returns ` + data.NamePlural + ` with advanced dynamic filtering like Directus +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param fields query string false "Fields to select (e.g., fields=*.*)" +// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[name][_eq]=value)" +// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)" +// @Param limit query int false "Limit" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} ` + data.Category + `.` + data.Name + `GetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + `/dynamic [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `Dynamic(c *gin.Context) { + // Parse query parameters + parser := utils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) + if err != nil { + h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) + return + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Create context with timeout + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Execute query with dynamic filtering + items, 
total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: "Data ` + data.Category + ` berhasil diambil", + Data: items, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateSearchMethod(data HandlerData) string { + return ` + +// Search` + data.Name + `Advanced provides advanced search capabilities +func (h *` + data.Name + `Handler) Search` + data.Name + `Advanced(c *gin.Context) { + // Parse complex search parameters + searchQuery := c.Query("q") + if searchQuery == "" { + h.respondError(c, "Search query is required", fmt.Errorf("empty search query"), http.StatusBadRequest) + return + } + + // Build dynamic query for search + query := utils.DynamicQuery{ + Fields: []string{"*"}, + Filters: []utils.FilterGroup{{ + Filters: []utils.DynamicFilter{ + { + Column: "status", + Operator: utils.OpNotEqual, + Value: "deleted", + }, + { + Column: "name", + Operator: utils.OpContains, + Value: searchQuery, + LogicOp: "OR", + }, + }, + LogicOp: "AND", + }}, + Sort: []utils.SortField{{ + Column: "date_created", + Order: "DESC", + }}, + Limit: 20, + Offset: 0, + } + + // Parse pagination if provided + if limit := c.Query("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { + query.Limit = l + } + } + if offset := c.Query("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Get database connection + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + 
defer cancel() + + // Execute search + items, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := ` + data.Category + `.` + data.Name + `GetResponse{ + Message: fmt.Sprintf("Search results for '%s'", searchQuery), + Data: items, + Meta: meta, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateCreateMethod(data HandlerData) string { + return ` + +// Create` + data.Name + ` godoc +// @Summary Create ` + data.NameLower + ` +// @Description Creates a new ` + data.NameLower + ` record +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param request body ` + data.Category + `.` + data.Name + `CreateRequest true "` + data.Name + ` creation request" +// @Success 201 {object} ` + data.Category + `.` + data.Name + `CreateResponse "` + data.Name + ` created successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + ` [post] +func (h *` + data.Name + `Handler) Create` + data.Name + `(c *gin.Context) { + var req ` + data.Category + `.` + data.Name + `CreateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Validate request + if err := ` + data.NameLower + `validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Validate duplicate and daily submission + if err 
:= h.validate` + data.Name + `Submission(ctx, dbConn, &req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + item, err := h.create` + data.Name + `(ctx, dbConn, &req) + if err != nil { + h.logAndRespondError(c, "Failed to create ` + data.NameLower + `", err, http.StatusInternalServerError) + return + } + + response := ` + data.Category + `.` + data.Name + `CreateResponse{ + Message: "` + data.Name + ` berhasil dibuat", + Data: item, + } + + c.JSON(http.StatusCreated, response) +}` +} + +func generateUpdateMethod(data HandlerData) string { + return ` + +// Update` + data.Name + ` godoc +// @Summary Update ` + data.NameLower + ` +// @Description Updates an existing ` + data.NameLower + ` record +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Param request body ` + data.Category + `.` + data.Name + `UpdateRequest true "` + data.Name + ` update request" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `UpdateResponse "` + data.Name + ` updated successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [put] +func (h *` + data.Name + `Handler) Update` + data.Name + `(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + var req ` + data.Category + `.` + data.Name + `UpdateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + + // Set ID from path parameter + req.ID = id + + // Validate request + if err := ` + data.NameLower + `validate.Struct(&req); err != nil { + 
h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + item, err := h.update` + data.Name + `(ctx, dbConn, &req) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to update ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `UpdateResponse{ + Message: "` + data.Name + ` berhasil diperbarui", + Data: item, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateDeleteMethod(data HandlerData) string { + return ` + +// Delete` + data.Name + ` godoc +// @Summary Delete ` + data.NameLower + ` +// @Description Soft deletes a ` + data.NameLower + ` by setting status to 'deleted' +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param id path string true "` + data.Name + ` ID (UUID)" +// @Success 200 {object} ` + data.Category + `.` + data.Name + `DeleteResponse "` + data.Name + ` deleted successfully" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NameLower + `/{id} [delete] +func (h *` + data.Name + `Handler) Delete` + data.Name + `(c *gin.Context) { + id := c.Param("id") + + // Validate UUID format + if _, err := uuid.Parse(id); err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, 
http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + err = h.delete` + data.Name + `(ctx, dbConn, id) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to delete ` + data.NameLower + `", err, http.StatusInternalServerError) + } + return + } + + response := ` + data.Category + `.` + data.Name + `DeleteResponse{ + Message: "` + data.Name + ` berhasil dihapus", + ID: id, + } + + c.JSON(http.StatusOK, response) +}` +} + +func generateStatsMethod(data HandlerData) string { + return ` + +// Get` + data.Name + `Stats godoc +// @Summary Get ` + data.NameLower + ` statistics +// @Description Returns comprehensive statistics about ` + data.NameLower + ` data +// @Tags ` + data.Name + ` +// @Accept json +// @Produce json +// @Param status query string false "Filter statistics by status" +// @Success 200 {object} models.AggregateData "Statistics data" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/` + data.NamePlural + `/stats [get] +func (h *` + data.Name + `Handler) Get` + data.Name + `Stats(c *gin.Context) { + dbConn, err := h.db.GetDB("postgres_satudata") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + filter := h.parseFilterParams(c) + aggregateData, err := h.getAggregateData(ctx, dbConn, filter) + if err != nil { + h.logAndRespondError(c, "Failed to get statistics", err, http.StatusInternalServerError) + return + } + + c.JSON(http.StatusOK, gin.H{ + "message": "Statistik ` + data.NameLower + ` berhasil diambil", + "data": aggregateData, + }) +}` +} + +func generateHelperMethods(data HandlerData) string { + helperMethods := ` + +// Database operations 
+func (h *` + data.Name + `Handler) get` + data.Name + `ByID(ctx context.Context, dbConn *sql.DB, id string) (*` + data.Category + `.` + data.Name + `, error) { + query := "SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE id = $1 AND status != 'deleted'" + row := dbConn.QueryRowContext(ctx, query, id) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, err + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) create` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) (*` + data.Category + `.` + data.Name + `, error) { + id := uuid.New().String() + now := time.Now() + + query := "INSERT INTO ` + data.TableName + ` (id, status, date_created, date_updated, name) VALUES ($1, $2, $3, $4, $5) RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" + row := dbConn.QueryRowContext(ctx, query, id, req.Status, now, now, req.Name) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, fmt.Errorf("failed to create ` + data.NameLower + `: %w", err) + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) update` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `UpdateRequest) (*` + data.Category + `.` + data.Name + `, error) { + now := time.Now() + + query := "UPDATE ` + data.TableName + ` SET status = $2, date_updated = $3, name = $4 WHERE id = $1 AND status != 'deleted' RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" + row := dbConn.QueryRowContext(ctx, query, 
req.ID, req.Status, now, req.Name) + + var item ` + data.Category + `.` + data.Name + ` + err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) + if err != nil { + return nil, fmt.Errorf("failed to update ` + data.NameLower + `: %w", err) + } + + return &item, nil +} + +func (h *` + data.Name + `Handler) delete` + data.Name + `(ctx context.Context, dbConn *sql.DB, id string) error { + now := time.Now() + query := "UPDATE ` + data.TableName + ` SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'" + + result, err := dbConn.ExecContext(ctx, query, id, now) + if err != nil { + return fmt.Errorf("failed to delete ` + data.NameLower + `: %w", err) + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("failed to get affected rows: %w", err) + } + + if rowsAffected == 0 { + return sql.ErrNoRows + } + + return nil +} + +func (h *` + data.Name + `Handler) fetch` + data.Name + `s(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter, limit, offset int) ([]` + data.Category + `.` + data.Name + `, error) { + whereClause, args := h.buildWhereClause(filter) + query := fmt.Sprintf("SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE %s ORDER BY date_created DESC NULLS LAST LIMIT $%d OFFSET $%d", whereClause, len(args)+1, len(args)+2) + args = append(args, limit, offset) + + rows, err := dbConn.QueryContext(ctx, query, args...) 
+ if err != nil {
+ return nil, fmt.Errorf("fetch ` + data.NamePlural + ` query failed: %w", err)
+ }
+ defer rows.Close()
+
+ items := make([]` + data.Category + `.` + data.Name + `, 0, limit)
+ for rows.Next() {
+ item, err := h.scan` + data.Name + `(rows)
+ if err != nil {
+ return nil, fmt.Errorf("scan ` + data.Name + ` failed: %w", err)
+ }
+ items = append(items, item)
+ }
+
+ if err := rows.Err(); err != nil {
+ return nil, fmt.Errorf("rows iteration error: %w", err)
+ }
+
+ log.Printf("Successfully fetched %d ` + data.NamePlural + ` with filters applied", len(items))
+ return items, nil
+}`
+
+ // Add dynamic fetch method if needed
+ if data.HasDynamic {
+ helperMethods += `
+
+// fetch` + data.Name + `sDynamic executes dynamic query
+func (h *` + data.Name + `Handler) fetch` + data.Name + `sDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]` + data.Category + `.` + data.Name + `, int, error) {
+ // Setup query builder
+ builder := utils.NewQueryBuilder("` + data.TableName + `").
+ SetAllowedColumns([]string{
+ "id", "status", "sort", "user_created", "date_created",
+ "user_updated", "date_updated", "name",
+ })
+
+ // Add default filter to exclude deleted records
+ query.Filters = append([]utils.FilterGroup{{
+ Filters: []utils.DynamicFilter{{
+ Column: "status",
+ Operator: utils.OpNotEqual,
+ Value: "deleted",
+ }},
+ LogicOp: "AND",
+ }}, query.Filters...) 
+ + // Execute concurrent queries + var ( + items [] ` + data.Category + `.` + data.Name + ` + total int + wg sync.WaitGroup + errChan = make(chan error, 2) + mu sync.Mutex + ) + + // Fetch total count + wg.Add(1) + go func() { + defer wg.Done() + countQuery := query + countQuery.Limit = 0 + countQuery.Offset = 0 + countSQL, countArgs, err := builder.BuildCountQuery(countQuery) + if err != nil { + errChan <- fmt.Errorf("failed to build count query: %w", err) + return + } + if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { + errChan <- fmt.Errorf("failed to get total count: %w", err) + return + } + }() + + // Fetch main data + wg.Add(1) + go func() { + defer wg.Done() + mainSQL, mainArgs, err := builder.BuildQuery(query) + if err != nil { + errChan <- fmt.Errorf("failed to build main query: %w", err) + return + } + + rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) + if err != nil { + errChan <- fmt.Errorf("failed to execute main query: %w", err) + return + } + defer rows.Close() + + var results []` + data.Category + `.` + data.Name + ` + for rows.Next() { + item, err := h.scan` + data.Name + `(rows) + if err != nil { + errChan <- fmt.Errorf("failed to scan ` + data.NameLower + `: %w", err) + return + } + results = append(results, item) + } + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("rows iteration error: %w", err) + return + } + + mu.Lock() + items = results + mu.Unlock() + }() + + // Wait for all goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, 0, err + } + } + + return items, total, nil +} +` + } + + helperMethods += ` +// Optimized scanning function +func (h *` + data.Name + `Handler) scan` + data.Name + `(rows *sql.Rows) (` + data.Category + `.` + data.Name + `, error) { + var item ` + data.Category + `.` + data.Name + ` + + // Scan into individual fields to handle nullable types properly + err := rows.Scan( + 
&item.ID,
+ &item.Status,
+ &item.Sort, // models.NullableInt32 — assumes it implements sql.Scanner; verify
+ &item.UserCreated, // sql.NullString
+ &item.DateCreated, // sql.NullTime
+ &item.UserUpdated, // sql.NullString
+ &item.DateUpdated, // sql.NullTime
+ &item.Name, // sql.NullString
+ )
+
+ return item, err
+}
+
+func (h *` + data.Name + `Handler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter, total *int) error {
+ whereClause, args := h.buildWhereClause(filter)
+ countQuery := fmt.Sprintf("SELECT COUNT(*) FROM ` + data.TableName + ` WHERE %s", whereClause)
+ if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil {
+ return fmt.Errorf("total count query failed: %w", err)
+ }
+ return nil
+}
+
+// Get comprehensive aggregate data dengan filter support
+func (h *` + data.Name + `Handler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `.` + data.Name + `Filter) (*models.AggregateData, error) {
+ aggregate := &models.AggregateData{
+ ByStatus: make(map[string]int),
+ }
+
+ // Build where clause untuk filter
+ whereClause, args := h.buildWhereClause(filter)
+
+ // Use concurrent execution untuk performance
+ var wg sync.WaitGroup
+ var mu sync.Mutex
+ errChan := make(chan error, 4)
+
+ // 1. Count by status
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ statusQuery := fmt.Sprintf("SELECT status, COUNT(*) FROM ` + data.TableName + ` WHERE %s GROUP BY status ORDER BY status", whereClause)
+
+ rows, err := dbConn.QueryContext(ctx, statusQuery, args...) 
+ if err != nil { + errChan <- fmt.Errorf("status query failed: %w", err) + return + } + defer rows.Close() + + mu.Lock() + for rows.Next() { + var status string + var count int + if err := rows.Scan(&status, &count); err != nil { + mu.Unlock() + errChan <- fmt.Errorf("status scan failed: %w", err) + return + } + aggregate.ByStatus[status] = count + switch status { + case "active": + aggregate.TotalActive = count + case "draft": + aggregate.TotalDraft = count + case "inactive": + aggregate.TotalInactive = count + } + } + mu.Unlock() + + if err := rows.Err(); err != nil { + errChan <- fmt.Errorf("status iteration error: %w", err) + } + }() + + // 2. Get last updated time dan today statistics + wg.Add(1) + go func() { + defer wg.Done() + + // Last updated + lastUpdatedQuery := fmt.Sprintf("SELECT MAX(date_updated) FROM ` + data.TableName + ` WHERE %s AND date_updated IS NOT NULL", whereClause) + var lastUpdated sql.NullTime + if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { + errChan <- fmt.Errorf("last updated query failed: %w", err) + return + } + + // Today statistics + today := time.Now().Format("2006-01-02") + todayStatsQuery := fmt.Sprintf(` + "`" + ` + SELECT + SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, + SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today + FROM ` + data.TableName + ` + WHERE %s` + "`" + `, len(args)+1, len(args)+1, len(args)+1, whereClause) + + todayArgs := append(args, today) + var createdToday, updatedToday int + if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { + errChan <- fmt.Errorf("today stats query failed: %w", err) + return + } + + mu.Lock() + if lastUpdated.Valid { + aggregate.LastUpdated = &lastUpdated.Time + } + aggregate.CreatedToday = createdToday + aggregate.UpdatedToday = updatedToday + mu.Unlock() + }() + + // Wait for all 
goroutines + wg.Wait() + close(errChan) + + // Check for errors + for err := range errChan { + if err != nil { + return nil, err + } + } + + return aggregate, nil +} + +// Enhanced error handling +func (h *` + data.Name + `Handler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + log.Printf("[ERROR] %s: %v", message, err) + h.respondError(c, message, err, statusCode) +} + +func (h *` + data.Name + `Handler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + + c.JSON(statusCode, models.ErrorResponse{ + Error: errorMessage, + Code: statusCode, + Message: err.Error(), + Timestamp: time.Now(), + }) +} + +// Parse pagination parameters dengan validation yang lebih ketat +func (h *` + data.Name + `Handler) parsePaginationParams(c *gin.Context) (int, int, error) { + limit := 10 // Default limit + offset := 0 // Default offset + + if limitStr := c.Query("limit"); limitStr != "" { + parsedLimit, err := strconv.Atoi(limitStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) + } + if parsedLimit <= 0 { + return 0, 0, fmt.Errorf("limit must be greater than 0") + } + if parsedLimit > 100 { + return 0, 0, fmt.Errorf("limit cannot exceed 100") + } + limit = parsedLimit + } + + if offsetStr := c.Query("offset"); offsetStr != "" { + parsedOffset, err := strconv.Atoi(offsetStr) + if err != nil { + return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) + } + if parsedOffset < 0 { + return 0, 0, fmt.Errorf("offset cannot be negative") + } + offset = parsedOffset + } + + log.Printf("Pagination - Limit: %d, Offset: %d", limit, offset) + return limit, offset, nil +} + +func (h *` + data.Name + `Handler) parseFilterParams(c *gin.Context) ` + data.Category + `.` + data.Name + `Filter { + filter := ` + data.Category + `.` + data.Name + `Filter{} + + if status := c.Query("status"); 
status != "" { + if models.IsValidStatus(status) { + filter.Status = &status + } + } + + if search := c.Query("search"); search != "" { + filter.Search = &search + } + + // Parse date filters + if dateFromStr := c.Query("date_from"); dateFromStr != "" { + if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { + filter.DateFrom = &dateFrom + } + } + + if dateToStr := c.Query("date_to"); dateToStr != "" { + if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { + filter.DateTo = &dateTo + } + } + + return filter +} + +// Build WHERE clause dengan filter parameters +func (h *` + data.Name + `Handler) buildWhereClause(filter ` + data.Category + `.` + data.Name + `Filter) (string, []interface{}) { + conditions := []string{"status != 'deleted'"} + args := []interface{}{} + paramCount := 1 + + if filter.Status != nil { + conditions = append(conditions, fmt.Sprintf("status = $%d", paramCount)) + args = append(args, *filter.Status) + paramCount++ + } + + if filter.Search != nil { + searchCondition := fmt.Sprintf("name ILIKE $%d", paramCount) + conditions = append(conditions, searchCondition) + searchTerm := "%" + *filter.Search + "%" + args = append(args, searchTerm) + paramCount++ + } + + if filter.DateFrom != nil { + conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) + args = append(args, *filter.DateFrom) + paramCount++ + } + + if filter.DateTo != nil { + conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) + args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) + paramCount++ + } + + return strings.Join(conditions, " AND "), args +} + +func (h *` + data.Name + `Handler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages := 0 + currentPage := 1 + if limit > 0 { + totalPages = (total + limit - 1) / limit // Ceiling division + currentPage = (offset / limit) + 1 + } + + return models.MetaResponse{ + Limit: limit, + Offset: offset, + Total: total, + 
TotalPages: totalPages, + CurrentPage: currentPage, + HasNext: offset+limit < total, + HasPrev: offset > 0, + } +} + +// validate` + data.Name + `Submission performs validation for duplicate entries and daily submission limits +func (h *` + data.Name + `Handler) validate` + data.Name + `Submission(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) error { + // Import the validation utility + validator := validation.NewDuplicateValidator(dbConn) + + // Use default configuration + config := validation.ValidationConfig{ + TableName: "` + data.TableName + `", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + } + + // Validate duplicate entries with active status for today + err := validator.ValidateDuplicate(ctx, config, "dummy_id") + if err != nil { + return fmt.Errorf("validation failed: %w", err) + } + + // Validate once per day submission + err = validator.ValidateOncePerDay(ctx, "` + data.TableName + `", "id", "date_created", "daily_limit") + if err != nil { + return fmt.Errorf("daily submission limit exceeded: %w", err) + } + + return nil +} + +// Example usage of the validation utility with custom configuration +func (h *` + data.Name + `Handler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `.` + data.Name + `CreateRequest) error { + // Create validator instance + validator := validation.NewDuplicateValidator(dbConn) + + // Use custom configuration + config := validation.ValidationConfig{ + TableName: "` + data.TableName + `", + IDColumn: "id", + StatusColumn: "status", + DateColumn: "date_created", + ActiveStatuses: []string{"active", "draft"}, + AdditionalFields: map[string]interface{}{ + "name": req.Name, + }, + } + + // Validate with custom fields + fields := map[string]interface{}{ + "name": *req.Name, + } + + err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) + if err != nil { + 
return fmt.Errorf("custom validation failed: %w", err) + } + + return nil +} + +// GetLastSubmissionTime example +func (h *` + data.Name + `Handler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) { + validator := validation.NewDuplicateValidator(dbConn) + return validator.GetLastSubmissionTime(ctx, "` + data.TableName + `", "id", "date_created", identifier) +}` + + return helperMethods +} + +// Keep existing functions for model generation and routes... +// (The remaining functions stay the same as in the original file) + +// ================= MODEL GENERATION ===================== +func generateModelFile(data HandlerData, modelDir string) { + // Tentukan import block + var importBlock, nullablePrefix string + + if data.Category == "models" { + importBlock = `import ( + "database/sql" + "encoding/json" + "time" +) +` + } else { + nullablePrefix = "models." + importBlock = `import ( + "` + data.ModuleName + `/internal/models" + "database/sql" + "encoding/json" + "time" +) +` + } + + modelContent := `package ` + data.Category + ` + +` + importBlock + ` + +// ` + data.Name + ` represents the data structure for the ` + data.NameLower + ` table +// with proper null handling and optimized JSON marshaling +type ` + data.Name + ` struct { + ID string ` + "`json:\"id\" db:\"id\"`" + ` + Status string ` + "`json:\"status\" db:\"status\"`" + ` + Sort ` + nullablePrefix + "NullableInt32 `json:\"sort,omitempty\" db:\"sort\"`" + ` + UserCreated sql.NullString ` + "`json:\"user_created,omitempty\" db:\"user_created\"`" + ` + DateCreated sql.NullTime ` + "`json:\"date_created,omitempty\" db:\"date_created\"`" + ` + UserUpdated sql.NullString ` + "`json:\"user_updated,omitempty\" db:\"user_updated\"`" + ` + DateUpdated sql.NullTime ` + "`json:\"date_updated,omitempty\" db:\"date_updated\"`" + ` + Name sql.NullString ` + "`json:\"name,omitempty\" db:\"name\"`" + ` +} + +// Custom JSON marshaling untuk ` + data.Name + ` agar 
NULL values tidak muncul di response +func (r ` + data.Name + `) MarshalJSON() ([]byte, error) { + type Alias ` + data.Name + ` + aux := &struct { + Sort *int ` + "`json:\"sort,omitempty\"`" + ` + UserCreated *string ` + "`json:\"user_created,omitempty\"`" + ` + DateCreated *time.Time ` + "`json:\"date_created,omitempty\"`" + ` + UserUpdated *string ` + "`json:\"user_updated,omitempty\"`" + ` + DateUpdated *time.Time ` + "`json:\"date_updated,omitempty\"`" + ` + Name *string ` + "`json:\"name,omitempty\"`" + ` + *Alias + }{ + Alias: (*Alias)(&r), + } + + if r.Sort.Valid { + sort := int(r.Sort.Int32) + aux.Sort = &sort + } + if r.UserCreated.Valid { + aux.UserCreated = &r.UserCreated.String + } + if r.DateCreated.Valid { + aux.DateCreated = &r.DateCreated.Time + } + if r.UserUpdated.Valid { + aux.UserUpdated = &r.UserUpdated.String + } + if r.DateUpdated.Valid { + aux.DateUpdated = &r.DateUpdated.Time + } + if r.Name.Valid { + aux.Name = &r.Name.String + } + return json.Marshal(aux) +} + +// Helper methods untuk mendapatkan nilai yang aman +func (r *` + data.Name + `) GetName() string { + if r.Name.Valid { + return r.Name.String + } + return "" +} +` + + // Add request/response structs based on enabled methods + if data.HasGet { + modelContent += ` + +// Response struct untuk GET by ID +type ` + data.Name + `GetByIDResponse struct { + Message string ` + "`json:\"message\"`" + ` + Data *` + data.Name + ` ` + "`json:\"data\"`" + ` +} + +// Enhanced GET response dengan pagination dan aggregation +type ` + data.Name + `GetResponse struct { + Message string ` + "`json:\"message\"`" + ` + Data []` + data.Name + ` ` + "`json:\"data\"`" + ` + Meta ` + nullablePrefix + "MetaResponse `json:\"meta\"`" + ` + Summary *` + nullablePrefix + "AggregateData `json:\"summary,omitempty\"`" + ` +} +` + } + if data.HasPost { + modelContent += ` + +// Request struct untuk create +type ` + data.Name + `CreateRequest struct { + Status string ` + "`json:\"status\" 
validate:\"required,oneof=draft active inactive\"`" + ` + Name *string ` + "`json:\"name,omitempty\" validate:\"omitempty,min=1,max=255\"`" + ` +} + +// Response struct untuk create +type ` + data.Name + `CreateResponse struct { + Message string ` + "`json:\"message\"`" + ` + Data *` + data.Name + ` ` + "`json:\"data\"`" + ` +} +` + } + if data.HasPut { + modelContent += ` + +// Update request +type ` + data.Name + `UpdateRequest struct { + ID string ` + "`json:\"-\" validate:\"required,uuid4\"`" + ` + Status string ` + "`json:\"status\" validate:\"required,oneof=draft active inactive\"`" + ` + Name *string ` + "`json:\"name,omitempty\" validate:\"omitempty,min=1,max=255\"`" + ` +} + +// Response struct untuk update +type ` + data.Name + `UpdateResponse struct { + Message string ` + "`json:\"message\"`" + ` + Data *` + data.Name + ` ` + "`json:\"data\"`" + ` +} +` + } + if data.HasDelete { + modelContent += ` + +// Response struct untuk delete +type ` + data.Name + `DeleteResponse struct { + Message string ` + "`json:\"message\"`" + ` + ID string ` + "`json:\"id\"`" + ` +} +` + } + // Add filter struct + modelContent += ` + +// Filter struct untuk query parameters +type ` + data.Name + `Filter struct { + Status *string ` + "`json:\"status,omitempty\" form:\"status\"`" + ` + Search *string ` + "`json:\"search,omitempty\" form:\"search\"`" + ` + DateFrom *time.Time ` + "`json:\"date_from,omitempty\" form:\"date_from\"`" + ` + DateTo *time.Time ` + "`json:\"date_to,omitempty\" form:\"date_to\"`" + ` +} +` + writeFile(filepath.Join(modelDir, data.NameLower+".go"), modelContent) +} + +// ================= ROUTES GENERATION ===================== +func updateRoutesFile(data HandlerData) { + routesFile := "internal/routes/v1/routes.go" + content, err := os.ReadFile(routesFile) + if err != nil { + fmt.Printf("โš ๏ธ Could not read routes.go: %v\n", err) + fmt.Printf("๐Ÿ“ Please manually add these routes to your routes.go file:\n") + printRoutesSample(data) + return + } + + 
	routesContent := string(content)

	// Build import path and alias for the generated handler package.
	// Category "models" means a top-level handler package; any other
	// category is nested under internal/handlers/<category>.
	var importPath, importAlias string
	if data.Category != "models" {
		importPath = fmt.Sprintf("%s/internal/handlers/"+data.Category, data.ModuleName)
		importAlias = data.Category + data.Name + "Handlers"
	} else {
		importPath = fmt.Sprintf("%s/internal/handlers", data.ModuleName)
		importAlias = data.NameLower + "Handlers"
	}

	// Add import (idempotent: skipped when the aliased import is already present).
	importPattern := fmt.Sprintf("%s \"%s\"", importAlias, importPath)
	if !strings.Contains(routesContent, importPattern) {
		importToAdd := fmt.Sprintf("\t%s \"%s\"", importAlias, importPath)
		// NOTE(review): if routes.go has no parenthesized "import (" block
		// (e.g. a single one-line import), the new import is silently
		// dropped and the written file will not compile — verify.
		if strings.Contains(routesContent, "import (") {
			routesContent = strings.Replace(routesContent, "import (",
				"import (\n"+importToAdd, 1)
		}
	}

	// Build new routes in protected group format
	newRoutes := generateProtectedRouteBlock(data)

	// Insert above protected routes marker; the marker must match the line
	// in routes.go byte-for-byte for this to find it.
	insertMarker := "// ============= PUBLISHED ROUTES ==============================================="
	if strings.Contains(routesContent, insertMarker) {
		// Presence of "New<Name>Handler" anywhere in the file is treated as
		// "routes for this resource were already generated".
		if !strings.Contains(routesContent, fmt.Sprintf("New%sHandler", data.Name)) {
			// Insert before the marker
			routesContent = strings.Replace(routesContent, insertMarker,
				newRoutes+"\n\t"+insertMarker, 1)
		} else {
			fmt.Printf("✅ Routes for %s already exist, skipping...\n", data.Name)
			return
		}
	} else {
		// Fallback: insert at end of setupV1Routes function, just before its
		// "\treturn r" line. If neither anchor exists, the file is written
		// back unchanged.
		setupFuncEnd := "\treturn r"
		if strings.Contains(routesContent, setupFuncEnd) {
			routesContent = strings.Replace(routesContent, setupFuncEnd,
				newRoutes+"\n\n\t"+setupFuncEnd, 1)
		}
	}

	if err := os.WriteFile(routesFile, []byte(routesContent), 0644); err != nil {
		fmt.Printf("Error writing routes.go: %v\n", err)
		return
	}

	fmt.Printf("✅ Updated routes.go with %s endpoints\n", data.Name)
}

// generateProtectedRouteBlock renders the gin route-group source snippet for
// the resource: handler construction, a v1 sub-group, and one route per
// verb/feature flag enabled on HandlerData. The returned string is Go source
// text destined for routes.go, not executed code.
func generateProtectedRouteBlock(data HandlerData) string {
	// fmt.Printf("📝 Group Part: %s\n", groupPath)
	var sb strings.Builder
	var importPath, 
groupPath string
	// Despite its name, importPath here is the Go identifier prefix used for
	// the generated handler/group variables; groupPath is the URL segment
	// the gin group is mounted on.
	if data.Category != "models" {
		importPath = data.Category + data.Name
		groupPath = strings.ToLower(data.Category) + "/" + data.NameLower
	} else {
		importPath = data.NameLower
		groupPath = data.NameLower
	}
	// Comment line plus handler and group declarations of the snippet.
	sb.WriteString("// ")
	sb.WriteString(data.Name)
	sb.WriteString(" endpoints\n")
	sb.WriteString("	")
	sb.WriteString(importPath)
	sb.WriteString("Handler := ")
	sb.WriteString(importPath)
	sb.WriteString("Handlers.New")
	sb.WriteString(data.Name)
	sb.WriteString("Handler()\n	")
	sb.WriteString(importPath)

	sb.WriteString("Group := v1.Group(\"/")
	sb.WriteString(groupPath)
	sb.WriteString("\")\n	{\n		")
	sb.WriteString(importPath)
	sb.WriteString("Group.GET(\"\", ")
	sb.WriteString(importPath)
	sb.WriteString("Handler.Get")
	sb.WriteString(data.Name)
	sb.WriteString(")\n")

	// Optional "/dynamic" listing route.
	if data.HasDynamic {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/dynamic\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Get")
		sb.WriteString(data.Name)
		sb.WriteString("Dynamic) // Route baru\n")
	}
	// Optional "/search" advanced-search route.
	if data.HasSearch {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/search\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Search")
		sb.WriteString(data.Name)
		sb.WriteString("Advanced) // Route pencarian\n")
	}
	// "/:id" route is always emitted, after the static sibling routes above.
	sb.WriteString("		")
	sb.WriteString(importPath)
	sb.WriteString("Group.GET(\"/:id\", ")
	sb.WriteString(importPath)
	sb.WriteString("Handler.Get")
	sb.WriteString(data.Name)
	sb.WriteString("ByID)\n")

	if data.HasPost {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.POST(\"\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Create")
		sb.WriteString(data.Name)
		sb.WriteString(")\n")
	}
	if data.HasPut {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.PUT(\"/:id\", ")
		sb.WriteString(importPath)
		
	sb.WriteString("Handler.Update")
	sb.WriteString(data.Name)
	sb.WriteString(")\n")
	}
	if data.HasDelete {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.DELETE(\"/:id\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Delete")
		sb.WriteString(data.Name)
		sb.WriteString(")\n")
	}
	// NOTE(review): the "/stats" route is emitted after "/:id". Current gin
	// resolves static segments before params, but older versions panic on
	// such static/param sibling conflicts — confirm against the gin version
	// the generated project pins.
	if data.HasStats {
		sb.WriteString("		")
		sb.WriteString(importPath)
		sb.WriteString("Group.GET(\"/stats\", ")
		sb.WriteString(importPath)
		sb.WriteString("Handler.Get")
		sb.WriteString(data.Name)
		sb.WriteString("Stats)\n")
	}
	// Close the generated route group.
	sb.WriteString("	}\n")
	return sb.String()
}

// printRoutesSample prints the generated route-group snippet to stdout so
// the user can paste it into routes.go by hand (used when routes.go cannot
// be read or updated automatically).
func printRoutesSample(data HandlerData) {
	fmt.Print(generateProtectedRouteBlock(data))
	fmt.Println()
}

// ================= UTILITY FUNCTIONS =====================

// writeFile writes content to filename with 0644 permissions, logging
// success or failure to stdout instead of returning an error to the caller.
func writeFile(filename, content string) {
	if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
		fmt.Printf("❌ Error creating file %s: %v\n", filename, err)
		return
	}
	fmt.Printf("✅ Generated: %s\n", filename)
}