commit 2bd26d91fb2e683173497cff87ea8abdca0bdaaa
Author: effendy
Date:   Wed Nov 26 07:24:49 2025 +0000

    first commit

diff --git a/.air.toml b/.air.toml
new file mode 100644
index 0000000..f127ea0
--- /dev/null
+++ b/.air.toml
@@ -0,0 +1,46 @@
+root = "."
+testdata_dir = "testdata"
+tmp_dir = "tmp"
+
+[build]
+  args_bin = []
+  bin = ".\\main.exe"
+  cmd = "make build"
+  delay = 1000
+  exclude_dir = ["assets", "tmp", "vendor", "testdata", "node_modules"]
+  exclude_file = []
+  exclude_regex = ["_test.go"]
+  exclude_unchanged = false
+  follow_symlink = false
+  full_bin = ""
+  include_dir = []
+  include_ext = ["go", "tpl", "tmpl", "html"]
+  include_file = []
+  kill_delay = "0s"
+  log = "build-errors.log"
+  poll = false
+  poll_interval = 0
+  post_cmd = []
+  pre_cmd = []
+  rerun = false
+  rerun_delay = 500
+  send_interrupt = false
+  stop_on_error = false
+
+[color]
+  app = ""
+  build = "yellow"
+  main = "magenta"
+  runner = "green"
+  watcher = "cyan"
+
+[log]
+  main_only = false
+  time = false
+
+[misc]
+  clean_on_exit = false
+
+[screen]
+  clear_on_rebuild = false
+  keep_scroll = true
diff --git a/.goreleaser.yml b/.goreleaser.yml
new file mode 100644
index 0000000..e001bf5
--- /dev/null
+++ b/.goreleaser.yml
@@ -0,0 +1,42 @@
+version: 2
+before:
+  hooks:
+    - go mod tidy
+
+env:
+  - PACKAGE_PATH=github.com///cmd
+
+builds:
+- binary: "{{ .ProjectName }}"
+  main: ./cmd/api
+  goos:
+    - darwin
+    - linux
+    - windows
+  goarch:
+    - amd64
+    - arm64
+  env:
+    - CGO_ENABLED=0
+  ldflags:
+    - -s -w -X {{.Env.PACKAGE_PATH}}={{.Version}}
+release:
+  prerelease: auto
+
+universal_binaries:
+- replace: true
+
+archives:
+  - name_template: >
+      {{- .ProjectName }}_{{- .Version }}_{{- title .Os }}_{{- if eq .Arch "amd64" }}x86_64{{- else if eq .Arch "386" }}i386{{- else }}{{ .Arch }}{{ end }}{{- if .Arm }}v{{ .Arm }}{{ end -}}
+    format_overrides:
+      - goos: windows
+        format: zip
+    builds_info:
+      group: root
+      owner: root
+    files:
+      - README.md
+
+checksum:
+  name_template: 'checksums.txt'
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..4f13c6e
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,18 @@
+FROM golang:1.24.4-alpine AS build
+
+WORKDIR /app
+
+COPY go.mod go.sum ./
+RUN go mod download
+
+COPY . .
+
+RUN go build -o main cmd/api/main.go
+
+FROM alpine:3.20.1 AS prod
+WORKDIR /app
+COPY --from=build /app/main /app/main
+EXPOSE 8070
+CMD ["./main"]
+
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7b73ca7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,87 @@
+# 🚀 API Service - Patient Management System
+
+## 📑 Table of Contents
+
+- [✨ Key Features](#-key-features)
+- [🏗️ Architecture](#️-architecture)
+- [⚡ Quick Start](#-quick-start)
+- [🔐 Authentication](#-authentication)
+- [📊 API Endpoints](#-api-endpoints)
+- [🛠️ Development](#️-development)
+- [🚀 Deployment](#-deployment)
+- [📚 Documentation](#-documentation)
+- [🚨 Troubleshooting](#-troubleshooting)
+
+***
+
+## ✨ Key Features
+
+### Core Features
+
+- **🔒 JWT Authentication** - Authentication with Keycloak integration
+- **👥 Patient Management** - Full CRUD for patient data
+- **🔍 Dynamic Filtering** - Dynamic filtering and searching of patient data
+- **📊 Advanced Search** - Search across multiple fields and operators
+- **🏥 BPJS Integration** - Integration with the BPJS health service
+- **🩺 SATUSEHAT Integration** - Integration with the SATUSEHAT health platform
+- **📖 API Documentation** - Interactive Swagger/OpenAPI docs
+
+### Developer Experience
+
+- **🔥 Hot Reload** - Development with auto-restart
+- **🐳 Docker Ready** - Easy deployment
+- **⚡ Code Generator** - Tools to generate handlers and models
+- **🧪 Testing Suite** - Unit and integration tests
+- **📊 Health Monitoring** - Application health monitoring
+- **🗄️ Multi Database** - Supports PostgreSQL, MySQL, and MongoDB
+
+***
+
+## 🏗️ Architecture
+
+### Clean Architecture Layers
+
+```
+┌──────────────────────────────────────┐
+│          Presentation Layer          │  ← handlers/, routes/
+├──────────────────────────────────────┤
+│          Application Layer           │  ← middleware/, services/
+├──────────────────────────────────────┤
+│             Domain Layer             │  ← models/, validation/
+├──────────────────────────────────────┤
+│         Infrastructure Layer         │  ← database/, external APIs
+└──────────────────────────────────────┘
+```
+
+### Project Structure
+
+```
+api-service/
+├── 📁 cmd/                  # Application entry points
+│   └── api/main.go          # API server
+├── 📁 internal/             # Core business logic
+│   ├── config/              # Configuration management
+│   ├── database/            # Database connections
+│   ├── handlers/            # HTTP controllers
+│   ├── middleware/          # Auth & validation middleware
+│   ├── models/              # Data structures
+│   ├── routes/              # API routing
+│   ├── services/            # Business logic services
+│   │   └── auth/            # Auth services
+│   ├── utils/               # Utility functions
+│   │   ├── filters/         # Dynamic filtering
+│   │   └── validation/      # Data validation
+│   └── server/              # HTTP server setup
+├── 📁 docs/                 # Documentation
+└── 📁 examples/             # Example files
+```
+
+### Best Practices
+
+1. **Never commit .env files to the repository**
+2. **Use environment variables for all sensitive data**
+3. **Use secret management tools in production**
+4. **Rotate API keys regularly**
+5. **Implement rate limiting on API endpoints**
+
+***
\ No newline at end of file
diff --git a/cmd/api/main.go b/cmd/api/main.go
new file mode 100644
index 0000000..8f4e5c3
--- /dev/null
+++ b/cmd/api/main.go
@@ -0,0 +1,86 @@
+package main
+
+import (
+    "context"
+    "fmt"
+    "log"
+    "net/http"
+    "os/signal"
+    "syscall"
+    "time"
+
+    "api-service/internal/server"
+
+    "github.com/joho/godotenv" // Import the godotenv package
+
+    _ "api-service/docs"
+)
+
+// @title API Service
+// @version 1.0.0
+// @description A comprehensive Go API service with Swagger documentation
+// @termsOfService http://swagger.io/terms/
+
+// @contact.name API Support
+// @contact.url http://www.swagger.io/support
+// @contact.email support@swagger.io
+
+// @license.name Apache 2.0
+// @license.url http://www.apache.org/licenses/LICENSE-2.0.html
+
+// @host localhost:8080
+// @BasePath /api/v1
+// @schemes http https
+
+func gracefulShutdown(apiServer *http.Server, done chan bool) {
+    // Create context that listens for the interrupt signal from the OS.
+    ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
+    defer stop()
+
+    // Listen for the interrupt signal.
+    <-ctx.Done()
+
+    log.Println("shutting down gracefully, press Ctrl+C again to force")
+    stop() // Allow Ctrl+C to force shutdown
+
+    // The context is used to inform the server it has 5 seconds to finish
+    // the request it is currently handling
+    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+    defer cancel()
+    if err := apiServer.Shutdown(ctx); err != nil {
+        log.Printf("Server forced to shutdown with error: %v", err)
+    }
+
+    log.Println("Server exiting")
+
+    // Notify the main goroutine that the shutdown is complete
+    done <- true
+}
+
+func main() {
+    log.Println("Starting API Service...")
+
+    // Load environment variables from .env file
+    if err := godotenv.Load(); err != nil {
+        log.Printf("Warning: .env file not found or could not be loaded: %v", err)
+        log.Println("Continuing with system environment variables...")
+    }
+
+    server := server.NewServer()
+
+    // Create a done channel to signal when the shutdown is complete
+    done := make(chan bool, 1)
+
+    // Run graceful shutdown in a separate goroutine
+    go gracefulShutdown(server, done)
+
+    log.Printf("Server starting on port %s", server.Addr)
+    err := server.ListenAndServe()
+    if err != nil && err != http.ErrServerClosed {
+        panic(fmt.Sprintf("http server error: %s", err))
+    }
+
+    // Wait for the graceful shutdown to complete
+    <-done
+    log.Println("Graceful shutdown complete.")
+}
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..8dd0cc7
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,35 @@
+services:
+  app:
+    container_name: general-test
+    build:
+      context: .
+ dockerfile: Dockerfile + target: prod + restart: unless-stopped + ports: + - "8070:8070" + environment: + # Server Configuration + PORT: 8070 + APP_ENV: production + GIN_MODE: release + + # SATUDATA Database Configuration (PostgreSQL) + # POSTGRES_SATUDATA_CONNECTION: postgres + # POSTGRES_SATUDATA_USERNAME: postgres + # POSTGRES_SATUDATA_PASSWORD: postgres + # POSTGRES_SATUDATA_HOST: host + # POSTGRES_SATUDATA_DATABASE: postgres + # POSTGRES_SATUDATA_PORT: 5432 + # POSTGRES_SATUDATA_SSLMODE: disable + + + # MYSQL Antrian Database + MYSQL_ANTRIAN_CONNECTION: mysql + MYSQL_ANTRIAN_HOST: host + MYSQL_ANTRIAN_USERNAME: mysql + MYSQL_ANTRIAN_PASSWORD: mysql + MYSQL_ANTRIAN_DATABASE: mysql + MYSQL_ANTRIAN_PORT: 3306 + MYSQL_ANTRIAN_SSLMODE: disable + diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000..69fb43a --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,99 @@ +// Package docs Code generated by swaggo/swag. DO NOT EDIT +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/api-service_internal_models_auth.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "api-service_internal_models_auth.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0.0", + Host: "localhost:8080", + BasePath: "/api/v1", + Schemes: []string{"http", "https"}, + Title: "API Service", + Description: "A comprehensive Go API service with Swagger documentation", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, + LeftDelim: "{{", + RightDelim: "}}", +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/docs/swagger.json b/docs/swagger.json new file mode 100644 index 0000000..a850983 --- /dev/null +++ b/docs/swagger.json @@ -0,0 +1,79 @@ +{ + "schemes": [ + "http", + "https" + ], + "swagger": "2.0", + "info": { + "description": "A comprehensive Go API service with Swagger documentation", + "title": "API Service", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "API Support", + "url": "http://www.swagger.io/support", + "email": "support@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "1.0.0" + }, + "host": "localhost:8080", + "basePath": "/api/v1", + "paths": { + 
"/api/v1/auth/me": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Get information about the currently authenticated user", + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get current user info", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/api-service_internal_models_auth.User" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + } + }, + "definitions": { + "api-service_internal_models_auth.User": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "role": { + "type": "string" + }, + "username": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml new file mode 100644 index 0000000..44564d2 --- /dev/null +++ b/docs/swagger.yaml @@ -0,0 +1,52 @@ +basePath: /api/v1 +definitions: + api-service_internal_models_auth.User: + properties: + email: + type: string + id: + type: string + role: + type: string + username: + type: string + type: object +host: localhost:8080 +info: + contact: + email: support@swagger.io + name: API Support + url: http://www.swagger.io/support + description: A comprehensive Go API service with Swagger documentation + license: + name: Apache 2.0 + url: http://www.apache.org/licenses/LICENSE-2.0.html + termsOfService: http://swagger.io/terms/ + title: API Service + version: 1.0.0 +paths: + /api/v1/auth/me: + get: + description: Get information about the currently authenticated user + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/api-service_internal_models_auth.User' + "401": + description: Unauthorized + schema: + additionalProperties: + type: string + type: object + security: + - Bearer: [] + summary: Get current user info + tags: + - Authentication +schemes: +- http +- https +swagger: "2.0" diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..6477df7 --- /dev/null +++ b/go.mod @@ -0,0 +1,102 @@ +module api-service + +go 1.24.4 + +require ( + github.com/gin-gonic/gin v1.10.1 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/google/uuid v1.6.0 + github.com/jackc/pgx/v5 v5.7.2 // Ensure pgx is a direct dependency + go.mongodb.org/mongo-driver v1.17.3 + golang.org/x/crypto v0.41.0 + golang.org/x/sync v0.16.0 + gorm.io/driver/mysql v1.6.0 // GORM MySQL driver + gorm.io/driver/postgres v1.5.11 // Added GORM PostgreSQL driver + gorm.io/driver/sqlserver v1.6.1 // GORM SQL Server driver +) + +require ( + github.com/Masterminds/squirrel v1.5.4 + github.com/gin-contrib/cors v1.7.6 + github.com/go-playground/validator/v10 v10.27.0 + github.com/go-redis/redis_rate/v10 v10.0.1 + github.com/jmoiron/sqlx v1.4.0 + github.com/joho/godotenv v1.5.1 + github.com/lib/pq v1.10.9 + github.com/redis/go-redis/v9 v9.16.0 + github.com/spf13/viper v1.21.0 + github.com/swaggo/files v1.0.1 + github.com/swaggo/gin-swagger v1.6.0 + github.com/swaggo/swag v1.16.6 + golang.org/x/time v0.14.0 + gopkg.in/yaml.v2 v2.4.0 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + filippo.io/edwards25519 v1.1.0 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/bytedance/sonic v1.14.0 // indirect + github.com/bytedance/sonic/loader v0.3.0 // indirect + 
github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.9 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/swag v0.19.15 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-sql-driver/mysql v1.8.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect + github.com/golang-sql/sqlexp v0.1.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect + github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.6 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/microsoft/go-mssqldb v1.8.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.0 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/arch v0.20.0 // indirect + golang.org/x/mod v0.26.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect + golang.org/x/tools v0.35.0 // indirect + google.golang.org/protobuf v1.36.7 // indirect + gorm.io/gorm v1.30.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..4fc4172 --- /dev/null +++ b/go.sum @@ -0,0 +1,387 @@ +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore 
v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 h1:E+OJmp2tPvt1W+amx48v1eqbjDYsgN+RzP4q16yV5eM= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1/go.mod h1:a6xsAQUZg+VsS3TJ05SRp524Hs4pZ/AeFSr5ENf0Yjo= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 h1:U2rTu3Ef+7w9FHKIAXM6ZyqF3UOWJZ12zIm8zECAFfg= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 h1:jBQA3cKT4L2rWMpgE7Yt3Hwh2aUj8KXjIGLxjHeYNNo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1 h1:MyVTgWR8qd/Jw1Le0NZebGBUCLbtak3bJ3z1OlqZBpw= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM= +github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= +github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= +github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= +github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= 
+github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/cors v1.7.6 h1:3gQ8GMzs1Ylpf70y8bMw4fVpycXIeX1ZemuSQIsnQQY= +github.com/gin-contrib/cors v1.7.6/go.mod h1:Ulcl+xN4jel9t1Ry8vqph23a60FwH9xVLd+3ykmTjOk= +github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= +github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= +github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 
h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= +github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-redis/redis_rate/v10 v10.0.1 h1:calPxi7tVlxojKunJwQ72kwfozdy25RjA0bCj1h0MUo= +github.com/go-redis/redis_rate/v10 v10.0.1/go.mod h1:EMiuO9+cjRkR7UvdvwMO7vbgqJkltQHtwbdIQvaBKIU= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.2 
h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI= +github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= +github.com/leodido/go-urn 
v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/microsoft/go-mssqldb v1.8.2 h1:236sewazvC8FvG6Dr3bszrVhMkAl4KYImryLkRMCd0I= +github.com/microsoft/go-mssqldb v1.8.2/go.mod h1:vp38dT33FGfVotRiTmDo3bFyaHq+p3LektQrjTULowo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/redis/go-redis/v9 v9.16.0 h1:OotgqgLSRCmzfqChbQyG1PHC3tLNR89DG4jdOERSEP4= +github.com/redis/go-redis/v9 v9.16.0/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.14.1 
h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= +github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= +github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= +github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= +github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M= +github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo= +github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI= +github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod 
h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= +github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= +go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= +golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= 
+golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= +gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314= +gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI= +gorm.io/driver/sqlserver v1.6.1 h1:XWISFsu2I2pqd1KJhhTZNJMx1jNQ+zVL/Q8ovDcUjtY= +gorm.io/driver/sqlserver v1.6.1/go.mod h1:VZeNn7hqX1aXoN5TPAFGWvxWG90xtA8erGn2gQmpc6U= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..00ee1d0 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,247 @@ +package config + +import ( + "fmt" + "log" + "os" + "strconv" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) +type Config struct { + Server ServerConfig + Validator *validator.Validate +} +type ServerConfig struct { + Port int 
+ Mode string +} + +func LoadConfig() *Config { + log.Printf("DEBUG: Raw ENV for SECURITY_MAX_INPUT_LENGTH is: '%s'", os.Getenv("SECURITY_MAX_INPUT_LENGTH")) + config := &Config{ + Server: ServerConfig{ + Port: getEnvAsInt("PORT", 8080), + Mode: getEnv("GIN_MODE", "debug"), + }, + } + // log.Printf("DEBUG: Final Config Object. MaxInputLength is: %d", config.Security.MaxInputLength) + // Initialize validator + config.Validator = validator.New() + return config +} + + +// Lakukan hal yang sama untuk loadKeycloakConfig + + +// Helper functions for getting default values based on database type +func getDefaultPort(dbType string) int { + switch dbType { + case "postgres": + return 5432 + case "mysql": + return 3306 + case "sqlserver": + return 1433 + case "mongodb": + return 27017 + case "sqlite": + return 0 // SQLite doesn't use port + default: + return 5432 + } +} + +func getDefaultSchema(dbType string) string { + switch dbType { + case "postgres": + return "public" + case "mysql": + return "" + case "sqlserver": + return "dbo" + case "mongodb": + return "" + case "sqlite": + return "" + default: + return "public" + } +} + +func getDefaultSSLMode(dbType string) string { + switch dbType { + case "postgres": + return "disable" + case "mysql": + return "false" + case "sqlserver": + return "false" + case "mongodb": + return "false" + case "sqlite": + return "" + default: + return "disable" + } +} + +func getDefaultMaxOpenConns(dbType string) int { + switch dbType { + case "postgres": + return 25 + case "mysql": + return 25 + case "sqlserver": + return 25 + case "mongodb": + return 100 + case "sqlite": + return 1 // SQLite only supports one writer at a time + default: + return 25 + } +} + +func getDefaultMaxIdleConns(dbType string) int { + switch dbType { + case "postgres": + return 25 + case "mysql": + return 25 + case "sqlserver": + return 25 + case "mongodb": + return 10 + case "sqlite": + return 1 // SQLite only supports one writer at a time + default: + return 25 + } +} + +func getDefaultConnMaxLifetime(dbType string) string { + switch dbType { + case "postgres": + return "5m" + case "mysql": + return "5m" + case "sqlserver": + return "5m" + case "mongodb": + return "30m" + case "sqlite": + return "5m" + default: + return "5m" + } +} + +func getEnvFromMap(config map[string]string, key, defaultValue string) string { + if value, exists := config[key]; exists { + return value + } + return defaultValue +} + +func getEnvAsIntFromMap(config map[string]string, key string, defaultValue int) int { + if value, exists := config[key]; exists { + if intValue, err := strconv.Atoi(value); err == nil { + return intValue + } + } + return defaultValue +} + +func getEnvAsBoolFromMap(config map[string]string, key string, defaultValue bool) bool { + if value, exists := config[key]; exists { + if boolValue, err := strconv.ParseBool(value); err == nil { + return boolValue + } + } + return defaultValue +} + +func parseDuration(durationStr string) time.Duration { + if duration, err := time.ParseDuration(durationStr); err == nil { + return duration + } + return 5 * time.Minute +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +func getEnvAsInt(key string, defaultValue int) int { + valueStr := getEnv(key, "") + if value, err := strconv.Atoi(valueStr); err == nil { + return value + } + return defaultValue +} + +func getEnvAsBool(key string, defaultValue bool) bool { + valueStr := getEnv(key, "") + if value, err := 
strconv.ParseBool(valueStr); err == nil { + return value + } + return defaultValue +} + +// parseSchemes parses comma-separated schemes string into a slice +func parseSchemes(schemesStr string) []string { + if schemesStr == "" { + return []string{"http"} + } + + schemes := strings.Split(schemesStr, ",") + for i, scheme := range schemes { + schemes[i] = strings.TrimSpace(scheme) + } + return schemes +} + +// parseStaticTokens parses comma-separated static tokens string into a slice +func parseStaticTokens(tokensStr string) []string { + if tokensStr == "" { + return []string{} + } + + tokens := strings.Split(tokensStr, ",") + for i, token := range tokens { + tokens[i] = strings.TrimSpace(token) + // Remove empty tokens + if tokens[i] == "" { + tokens = append(tokens[:i], tokens[i+1:]...) + i-- + } + } + return tokens +} + +func parseOrigins(originsStr string) []string { + if originsStr == "" { + return []string{"http://localhost:8080"} // Default untuk pengembangan + } + origins := strings.Split(originsStr, ",") + for i, origin := range origins { + origins[i] = strings.TrimSpace(origin) + } + return origins +} + +func (c *Config) Validate() error { + var errs []string + + if len(errs) > 0 { + return fmt.Errorf("configuration validation failed: %s", strings.Join(errs, "; ")) + } + return nil +} diff --git a/internal/database/database.go b/internal/database/database.go new file mode 100644 index 0000000..7c52113 --- /dev/null +++ b/internal/database/database.go @@ -0,0 +1 @@ +package database \ No newline at end of file diff --git a/internal/models/models.go b/internal/models/models.go new file mode 100644 index 0000000..2643ef8 --- /dev/null +++ b/internal/models/models.go @@ -0,0 +1,221 @@ +package models + +import ( + "database/sql" + "database/sql/driver" + "net/http" + "strconv" + "time" +) + +// NullableInt32 - your existing implementation +type NullableInt32 struct { + Int32 int32 `json:"int32,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableInt32 +func (n *NullableInt32) Scan(value interface{}) error { + var ni sql.NullInt32 + if err := ni.Scan(value); err != nil { + return err + } + n.Int32 = ni.Int32 + n.Valid = ni.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableInt32 +func (n NullableInt32) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Int32, nil +} + +// NullableString provides consistent nullable string handling +type NullableString struct { + String string `json:"string,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableString +func (n *NullableString) Scan(value interface{}) error { + var ns sql.NullString + if err := ns.Scan(value); err != nil { + return err + } + n.String = ns.String + n.Valid = ns.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableString +func (n NullableString) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.String, nil +} + +// NullableTime provides consistent nullable time handling +type NullableTime struct { + Time time.Time `json:"time,omitempty"` + Valid bool `json:"valid"` +} + +// Scan implements the sql.Scanner interface for NullableTime +func (n *NullableTime) Scan(value interface{}) error { + var nt sql.NullTime + if err := nt.Scan(value); err != nil { + return err + } + n.Time = nt.Time + n.Valid = nt.Valid + return nil +} + +// Value implements the driver.Valuer interface for NullableTime +func (n 
NullableTime) Value() (driver.Value, error) { + if !n.Valid { + return nil, nil + } + return n.Time, nil +} + +// Metadata untuk pagination - dioptimalkan +type MetaResponse struct { + Limit int `json:"limit"` + Offset int `json:"offset"` + Total int `json:"total"` + TotalPages int `json:"total_pages"` + CurrentPage int `json:"current_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_prev"` +} + +// Aggregate data untuk summary +type AggregateData struct { + TotalActive int `json:"total_active"` + TotalDraft int `json:"total_draft"` + TotalInactive int `json:"total_inactive"` + ByStatus map[string]int `json:"by_status"` + ByDinas map[string]int `json:"by_dinas,omitempty"` + ByJenis map[string]int `json:"by_jenis,omitempty"` + LastUpdated *time.Time `json:"last_updated,omitempty"` + CreatedToday int `json:"created_today"` + UpdatedToday int `json:"updated_today"` +} + +// Error response yang konsisten +type ErrorResponse struct { + Error string `json:"error"` + Code int `json:"code"` + Message string `json:"message"` + Timestamp time.Time `json:"timestamp"` +} + +// BaseRequest contains common fields for all BPJS requests +type BaseRequest struct { + RequestID string `json:"request_id,omitempty"` + Timestamp time.Time `json:"timestamp,omitempty"` +} + +// BaseResponse contains common response fields +type BaseResponse struct { + Status string `json:"status"` + Message string `json:"message,omitempty"` + RequestID string `json:"request_id,omitempty"` + Timestamp string `json:"timestamp,omitempty"` +} + +// ErrorResponse represents error response structure +type ErrorResponseBpjs struct { + Status string `json:"status"` + Message string `json:"message"` + RequestID string `json:"request_id,omitempty"` + Errors map[string]interface{} `json:"errors,omitempty"` + Code string `json:"code,omitempty"` +} + +// PaginationRequest contains pagination parameters +type PaginationRequest struct { + Page int `json:"page" validate:"min=1"` + Limit int `json:"limit" validate:"min=1,max=100"` + SortBy string `json:"sort_by,omitempty"` + SortDir string `json:"sort_dir,omitempty" validate:"omitempty,oneof=asc desc"` +} + +// PaginationResponse contains pagination metadata +type PaginationResponse struct { + CurrentPage int `json:"current_page"` + TotalPages int `json:"total_pages"` + TotalItems int64 `json:"total_items"` + ItemsPerPage int `json:"items_per_page"` + HasNext bool `json:"has_next"` + HasPrev bool `json:"has_previous"` +} + +// MetaInfo contains additional metadata +type MetaInfo struct { + Version string `json:"version"` + Environment string `json:"environment"` + ServerTime string `json:"server_time"` +} + +func GetStatusCodeFromMeta(metaCode interface{}) int { + statusCode := http.StatusOK + + if metaCode != nil { + switch v := metaCode.(type) { + case string: + if code, err := strconv.Atoi(v); err == nil { + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + } else { + statusCode = http.StatusInternalServerError + } + case int: + if v >= 100 && v <= 599 { + statusCode = v + } else { + statusCode = http.StatusInternalServerError + } + case float64: + code := int(v) + if code >= 100 && code <= 599 { + statusCode = code + } else { + statusCode = http.StatusInternalServerError + } + default: + statusCode = http.StatusInternalServerError + } + } + + return statusCode +} + +// Validation constants +const ( + StatusDraft = "draft" + StatusActive = "active" + StatusInactive = "inactive" + StatusDeleted = "deleted" +) + +// 
ValidStatuses untuk validasi +var ValidStatuses = []string{StatusDraft, StatusActive, StatusInactive} + +// IsValidStatus helper function +func IsValidStatus(status string) bool { + for _, validStatus := range ValidStatuses { + if status == validStatus { + return true + } + } + return false +} diff --git a/internal/models/validation.go b/internal/models/validation.go new file mode 100644 index 0000000..1462d35 --- /dev/null +++ b/internal/models/validation.go @@ -0,0 +1,106 @@ +package models + +import ( + "regexp" + "strings" + "time" + + "github.com/go-playground/validator/v10" +) + +// CustomValidator wraps the validator +type CustomValidator struct { + Validator *validator.Validate +} + +// Validate validates struct +func (cv *CustomValidator) Validate(i interface{}) error { + return cv.Validator.Struct(i) +} + +// RegisterCustomValidations registers custom validation rules +func RegisterCustomValidations(v *validator.Validate) { + // Validate Indonesian phone number + v.RegisterValidation("indonesian_phone", validateIndonesianPhone) + + // Validate BPJS card number format + v.RegisterValidation("bpjs_card", validateBPJSCard) + + // Validate Indonesian NIK + v.RegisterValidation("indonesian_nik", validateIndonesianNIK) + + // Validate date format YYYY-MM-DD + v.RegisterValidation("date_format", validateDateFormat) + + // Validate ICD-10 code format + v.RegisterValidation("icd10", validateICD10) + + // Validate ICD-9-CM procedure code + v.RegisterValidation("icd9cm", validateICD9CM) +} + +func validateIndonesianPhone(fl validator.FieldLevel) bool { + phone := fl.Field().String() + if phone == "" { + return true // Optional field + } + + // Indonesian phone number pattern: +62, 62, 08, or 8 + pattern := `^(\+?62|0?8)[1-9][0-9]{7,11}$` + matched, _ := regexp.MatchString(pattern, phone) + return matched +} + +func validateBPJSCard(fl validator.FieldLevel) bool { + card := fl.Field().String() + if len(card) != 13 { + return false + } + + // BPJS card should be numeric + pattern := `^\d{13}$` + matched, _ := regexp.MatchString(pattern, card) + return matched +} + +func validateIndonesianNIK(fl validator.FieldLevel) bool { + nik := fl.Field().String() + if len(nik) != 16 { + return false + } + + // NIK should be numeric + pattern := `^\d{16}$` + matched, _ := regexp.MatchString(pattern, nik) + return matched +} + +func validateDateFormat(fl validator.FieldLevel) bool { + dateStr := fl.Field().String() + _, err := time.Parse("2006-01-02", dateStr) + return err == nil +} + +func validateICD10(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-10 pattern: Letter followed by 2 digits, optional dot and more digits + pattern := `^[A-Z]\d{2}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, strings.ToUpper(code)) + return matched +} + +func validateICD9CM(fl validator.FieldLevel) bool { + code := fl.Field().String() + if code == "" { + return true + } + + // Basic ICD-9-CM procedure pattern: 2-4 digits with optional decimal + pattern := `^\d{2,4}(\.\d+)?$` + matched, _ := regexp.MatchString(pattern, code) + return matched +} diff --git a/internal/routes/v1/routes.go b/internal/routes/v1/routes.go new file mode 100644 index 0000000..15b5a8a --- /dev/null +++ b/internal/routes/v1/routes.go @@ -0,0 +1,54 @@ +package v1 + +import ( + "api-service/internal/config" + "github.com/gin-gonic/gin" +) + +func RegisterRoutes(cfg *config.Config) *gin.Engine { + // Atur mode Gin berdasarkan konfigurasi + gin.SetMode(cfg.Server.Mode) + router := 
gin.New() + + // ============================================================================= + // GLOBAL MIDDLEWARE STACK (Middleware yang diperlukan SEMUA route) + // ============================================================================= + + + // ============================================================================= + // INISIALISASI SERVIS & HANDLER + // ============================================================================= + + + // ============================================================================= + // SWAGGER DOCUMENTATION (Publik - TANPA SecurityHeaders) + // ============================================================================= + + + // ============================================================================= + // API GROUPS (Dengan Keamanan Ketat) + // ============================================================================= + // Terapkan middleware keamanan dan validasi input HANYA ke grup API. + // Ini adalah perubahan utama. + + + // --- HEALTH CHECK & SYSTEM ROUTES --- + + + // --- API v1 GROUP --- + // ============================================================================= + // PUBLIC ROUTES (No Authentication Required) + // ============================================================================= + + + // ============================================================================= + // PROTECTED ROUTES (Authentication Required) + // ============================================================================= + + + // ============================================================================= + // DEBUG ROUTES (Publik - Tanpa keamanan ketat) + // ============================================================================= + + return router +} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 0000000..8a59788 --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,48 @@ +package server + +import ( + "fmt" + "net/http" + "os" + "strconv" + "time" + + _ "github.com/joho/godotenv/autoload" + + "api-service/internal/config" +) + +type Server struct { + port int +} + +func NewServer() *http.Server { + // Load configuration + cfg := config.LoadConfig() + cfg.Validate() + + port, _ := strconv.Atoi(os.Getenv("PORT")) + if port == 0 { + port = cfg.Server.Port + } + + // if dbService == nil { // Check if the database service is already initialized + // dbService = database.New(cfg) // Initialize only once + // } + + NewServer := &Server{ + port: port, + // db: dbService, // Use the global database service instance + } + + // Declare Server config + server := &http.Server{ + Addr: fmt.Sprintf(":%d", NewServer.port), + // Handler: v1.RegisterRoutes(cfg), + IdleTimeout: time.Minute, + ReadTimeout: 10 * time.Second, + WriteTimeout: 30 * time.Second, + } + + return server +} diff --git a/internal/utils/query/README.md b/internal/utils/query/README.md new file mode 100644 index 0000000..c4363ee --- /dev/null +++ b/internal/utils/query/README.md @@ -0,0 +1,737 @@ +# Dynamic Query Builder + +Pustaka Go yang powerful untuk membangun query SQL dan MongoDB secara dinamis dengan dukungan filtering, sorting, pagination, join, CTE, window functions, dan operasi JSON/Array. + +## Fitur Utama + +- **Multi-Database**: Mendukung PostgreSQL, MySQL, SQLite, SQL Server, dan MongoDB +- **Dynamic Filtering**: Berbagai operator filter (_eq, _neq, _like, _in, _between, dll.) 
+- **JSON Operations**: Dukungan penuh untuk query dan update data JSON +- **Array Operations**: Query array dengan berbagai operator +- **Security**: Built-in proteksi SQL injection dan kontrol akses kolom/tabel +- **Window Functions**: Dukungan ROW_NUMBER, RANK, dll. +- **CTE & Unions**: Support untuk Common Table Expressions dan UNION +- **Query Parsing**: Parse URL query parameters menjadi DynamicQuery + +## Instalasi + +```bash +go get github.com/Masterminds/squirrel +go get github.com/jmoiron/sqlx +go get go.mongodb.org/mongo-driver/mongo +``` + +## Contoh Penggunaan + +### 1. Operator Perbandingan Dasar + +```go +package main + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/jmoiron/sqlx" + _ "github.com/lib/pq" +) + +func main() { + // Inisialisasi QueryBuilder + qb := NewQueryBuilder(DBTypePostgreSQL) + + // Koneksi ke database + db, err := sqlx.Connect("postgres", "user=postgres dbname=mydb sslmode=disable") + if err != nil { + log.Fatal(err) + } + defer db.Close() + + // Equal (_eq) + query := DynamicQuery{ + From: "users", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "email", + Operator: OpEqual, + Value: "john@example.com", + }}, + LogicOp: "AND", + }}, + } + + var users []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, query, &users) + if err != nil { + log.Fatal(err) + } + fmt.Printf("Found %d users\n", len(users)) + + // Greater Than (_gt) + query = DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "price", + Operator: OpGreaterThan, + Value: 100000, + }}, + LogicOp: "AND", + }}, + Limit: 10, + } + + var products []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, query, &products) + if err != nil { + log.Fatal(err) + } + fmt.Printf("Found %d products with price > 100000\n", len(products)) +} +``` + +### 2. Operator Like dan String Matching + +```go +func stringMatchingExample(qb *QueryBuilder, db *sqlx.DB) { + // Case Insensitive Like (_ilike) + query := DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "name", + Operator: OpILike, + Value: "%laptop%", + }}, + LogicOp: "AND", + }}, + Limit: 20, + } + + var products []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &products) + + // Contains (_contains) + query = DynamicQuery{ + From: "articles", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "content", + Operator: OpContains, + Value: "golang", + }}, + LogicOp: "AND", + }}, + } + + var articles []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &articles) + + // Starts With (_starts_with) + query = DynamicQuery{ + From: "users", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "username", + Operator: OpStartsWith, + Value: "admin", + }}, + LogicOp: "AND", + }}, + } + + var admins []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &admins) +} +``` + +### 3. 
Operator IN, NOT IN, dan Between + +```go +func inBetweenExample(qb *QueryBuilder, db *sqlx.DB) { + // In (_in) + query := DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "category", + Operator: OpIn, + Value: []string{"electronics", "computers", "gadgets"}, + }}, + LogicOp: "AND", + }}, + Limit: 50, + } + + var products []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &products) + + // Between (_between) + query = DynamicQuery{ + From: "orders", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "order_date", + Operator: OpBetween, + Value: []interface{}{time.Now().AddDate(0, -1, 0), time.Now()}, + }}, + LogicOp: "AND", + }}, + Sort: []SortField{{Column: "order_date", Order: "DESC"}}, + Limit: 100, + } + + var orders []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &orders) +} +``` + +### 4. Operator Null dan Exists + +```go +func nullExistsExample(qb *QueryBuilder, db *sqlx.DB) { + // Is Null (_null) + query := DynamicQuery{ + From: "users", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "deleted_at", + Operator: OpNull, + Value: nil, + }}, + LogicOp: "AND", + }}, + } + + var activeUsers []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &activeUsers) + + // Is Not Null (_nnull) + query = DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "description", + Operator: OpNotNull, + Value: nil, + }}, + LogicOp: "AND", + }}, + } + + var productsWithDesc []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &productsWithDesc) +} +``` + +### 5. Operator JSON + +```go +func jsonExample(qb *QueryBuilder, db *sqlx.DB) { + // JSON Contains (_json_contains) + query := DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "attributes", + Operator: OpJsonContains, + Value: `{"color": "red"}`, + }}, + LogicOp: "AND", + }}, + } + + var redProducts []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &redProducts) + + // JSON Exists (_json_exists) + query = DynamicQuery{ + From: "users", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "profile", + Operator: OpJsonExists, + Options: map[string]interface{}{ + "path": "$.social_media", + }, + }}, + LogicOp: "AND", + }}, + } + + var usersWithSocialMedia []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &usersWithSocialMedia) + + // JSON Equal (_json_eq) + query = DynamicQuery{ + From: "settings", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "preferences", + Operator: OpJsonEqual, + Value: "dark_mode", + Options: map[string]interface{}{ + "path": "$.theme", + }, + }}, + LogicOp: "AND", + }}, + } + + var darkModeUsers []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &darkModeUsers) +} +``` + +### 6. 
Operator Array + +```go +func arrayExample(qb *QueryBuilder, db *sqlx.DB) { + // Array Contains (_array_contains) + query := DynamicQuery{ + From: "products", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "tags", + Operator: OpArrayContains, + Value: "premium", + }}, + LogicOp: "AND", + }}, + } + + var premiumProducts []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &premiumProducts) + + // Array Length (_array_length) + query = DynamicQuery{ + From: "galleries", + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "images", + Operator: OpArrayLength, + Options: map[string]interface{}{ + "length": 5, + }, + }}, + LogicOp: "AND", + }}, + } + + var galleriesWith5Images []map[string]interface{} + qb.ExecuteQuery(context.Background(), db, query, &galleriesWith5Images) +} +``` + +### 7. Complex Query dengan Multiple Filters + +```go +func complexQueryExample(qb *QueryBuilder, db *sqlx.DB) { + // Query kompleks dengan multiple filter groups + query := DynamicQuery{ + From: "products", + Fields: []SelectField{ + {Expression: "id", Alias: "product_id"}, + {Expression: "name", Alias: "product_name"}, + {Expression: "price", Alias: "price"}, + {Expression: "category", Alias: "category"}, + }, + Filters: []FilterGroup{ + // Filter Group 1: Active products with price range + { + Filters: []DynamicFilter{ + { + Column: "status", + Operator: OpEqual, + Value: "active", + }, + { + Column: "price", + Operator: OpBetween, + Value: []interface{}{100000, 500000}, + }, + }, + LogicOp: "AND", + }, + // Filter Group 2: Category OR name matching + { + Filters: []DynamicFilter{ + { + Column: "category", + Operator: OpIn, + Value: []string{"electronics", "computers"}, + }, + { + Column: "name", + Operator: OpILike, + Value: "%laptop%", + }, + }, + LogicOp: "OR", + }, + }, + Sort: []SortField{ + {Column: "price", Order: "ASC"}, + {Column: "name", Order: "ASC"}, + }, + Limit: 20, + Offset: 0, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("Found %d products\n", len(results)) + for _, result := range results { + fmt.Printf("Product: %s, Price: %v\n", result["product_name"], result["price"]) + } +} +``` + +### 8. 
Query dengan JOIN + +```go +func joinExample(qb *QueryBuilder, db *sqlx.DB) { + query := DynamicQuery{ + From: "orders", + Aliases: "o", + Fields: []SelectField{ + {Expression: "o.id", Alias: "order_id"}, + {Expression: "o.order_date", Alias: "order_date"}, + {Expression: "c.name", Alias: "customer_name"}, + {Expression: "p.name", Alias: "product_name"}, + }, + Joins: []Join{ + { + Type: "INNER", + Table: "customers", + Alias: "c", + OnConditions: FilterGroup{ + Filters: []DynamicFilter{ + { + Column: "o.customer_id", + Operator: OpEqual, + Value: "c.id", + }, + }, + LogicOp: "AND", + }, + }, + { + Type: "LEFT", + Table: "products", + Alias: "p", + OnConditions: FilterGroup{ + Filters: []DynamicFilter{ + { + Column: "o.product_id", + Operator: OpEqual, + Value: "p.id", + }, + }, + LogicOp: "AND", + }, + }, + }, + Filters: []FilterGroup{{ + Filters: []DynamicFilter{ + { + Column: "o.order_date", + Operator: OpGreaterThanEqual, + Value: time.Now().AddDate(0, -1, 0), + }, + }, + LogicOp: "AND", + }}, + Sort: []SortField{ + {Column: "o.order_date", Order: "DESC"}, + }, + Limit: 50, + } + + var orders []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &orders) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("Found %d orders\n", len(orders)) +} +``` + +### 9. Query dengan Window Functions + +```go +func windowFunctionExample(qb *QueryBuilder, db *sqlx.DB) { + query := DynamicQuery{ + From: "sales", + Fields: []SelectField{ + {Expression: "id", Alias: "sale_id"}, + {Expression: "product_name", Alias: "product"}, + {Expression: "amount", Alias: "sale_amount"}, + }, + WindowFunctions: []WindowFunction{ + { + Function: "ROW_NUMBER", + Over: "product_name", + OrderBy: "amount DESC", + Alias: "rank_in_category", + }, + { + Function: "SUM", + Over: "product_name", + OrderBy: "sale_date", + Frame: "ROWS UNBOUNDED PRECEDING", + Alias: "running_total", + }, + }, + Sort: []SortField{ + {Column: "product_name", Order: "ASC"}, + {Column: "amount", Order: "DESC"}, + }, + } + + var sales []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &sales) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("Found %d sales records\n", len(sales)) +} +``` + +### 10. 
MongoDB Example + +```go +import ( + "context" + "fmt" + "log" + "time" + + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +func mongodbExample() { + // Inisialisasi MongoQueryBuilder + mqb := NewMongoQueryBuilder() + + // Koneksi ke MongoDB + client, err := mongo.Connect(context.Background(), options.Client().ApplyURI("mongodb://localhost:27017")) + if err != nil { + log.Fatal(err) + } + defer client.Disconnect(context.Background()) + + collection := client.Database("mydb").Collection("products") + + // Query dengan filter + query := DynamicQuery{ + From: "products", + Filters: []FilterGroup{ + { + Filters: []DynamicFilter{ + { + Column: "status", + Operator: OpEqual, + Value: "active", + }, + { + Column: "price", + Operator: OpBetween, + Value: []interface{}{100000, 500000}, + }, + }, + LogicOp: "AND", + }, + { + Filters: []DynamicFilter{ + { + Column: "category", + Operator: OpIn, + Value: []string{"electronics", "computers"}, + }, + { + Column: "name", + Operator: OpILike, + Value: "laptop", + }, + }, + LogicOp: "OR", + }, + }, + Sort: []SortField{ + {Column: "price", Order: "ASC"}, + {Column: "name", Order: "ASC"}, + }, + Limit: 20, + Offset: 0, + } + + var results []map[string]interface{} + err = mqb.ExecuteFind(context.Background(), collection, query, &results) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("Found %d products\n", len(results)) + for _, result := range results { + fmt.Printf("Product: %s, Price: %v\n", result["name"], result["price"]) + } +} +``` + +### 11. URL Query Parsing + +```go +import ( + "net/url" +) + +func urlParsingExample() { + // Parse URL query parameters + values, _ := url.ParseQuery("filter[status][_eq]=active&filter[price][_gte]=100000&sort=-created_at&limit=20") + + parser := NewQueryParser() + query, err := parser.ParseQuery(values, "products") + if err != nil { + log.Fatal(err) + } + + // Query akan berisi: + // - Filters: status = 'active' AND price >= 100000 + // - Sort: created_at DESC + // - Limit: 20 + + qb := NewQueryBuilder(DBTypePostgreSQL) + db, _ := sqlx.Connect("postgres", "connection-string") + + var products []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, query, &products) + if err != nil { + log.Fatal(err) + } +} +``` + +## Konfigurasi Keamanan + +```go +func securityExample() { + qb := NewQueryBuilder(DBTypePostgreSQL) + + // Aktifkan security checks + qb.SetSecurityOptions(true, 1000) // max 1000 rows + + // Tentukan kolom yang diizinkan + qb.SetAllowedColumns([]string{ + "id", "name", "email", "status", "created_at", + "price", "category", "description", + }) + + // Tentukan tabel yang diizinkan + qb.SetAllowedTables([]string{ + "users", "products", "orders", "categories", + }) + + // Query dengan kolom/tabel tidak diizinkan akan error + query := DynamicQuery{ + From: "users", // OK + Filters: []FilterGroup{{ + Filters: []DynamicFilter{{ + Column: "password", // ERROR: kolom tidak diizinkan + Operator: OpEqual, + Value: "secret", + }}, + LogicOp: "AND", + }}, + } + + // Akan mengembalikan error + _, _, err := qb.BuildQuery(query) + fmt.Println(err) // "disallowed column: password" +} +``` + +## Referensi Operator + +### Operator Perbandingan +- `_eq` - Equal +- `_neq` - Not Equal +- `_gt` - Greater Than +- `_gte` - Greater Than Equal +- `_lt` - Less Than +- `_lte` - Less Than Equal + +### Operator String +- `_like` - Like (case-sensitive) +- `_ilike` - Like (case-insensitive) +- `_nlike` - Not Like (case-sensitive) +- 
`_nilike` - Not Like (case-insensitive) +- `_contains` - Contains substring +- `_ncontains` - Not Contains substring +- `_starts_with` - Starts with +- `_ends_with` - Ends with + +### Operator Set +- `_in` - In list +- `_nin` - Not In list +- `_between` - Between two values +- `_nbetween` - Not Between two values + +### Operator Null +- `_null` - Is Null +- `_nnull` - Is Not Null + +### Operator JSON +- `_json_contains` - JSON contains value +- `_json_ncontains` - JSON not contains value +- `_json_exists` - JSON path exists +- `_json_nexists` - JSON path not exists +- `_json_eq` - JSON path equals value +- `_json_neq` - JSON path not equals value + +### Operator Array +- `_array_contains` - Array contains value +- `_array_ncontains` - Array not contains value +- `_array_length` - Array has specific length + +## Best Practices + +1. **Selalu gunakan security checks** saat production: + ```go + qb.SetSecurityOptions(true, 1000) + qb.SetAllowedColumns(allowedColumns) + qb.SetAllowedTables(allowedTables) + ``` + +2. **Gunakan context dengan timeout** untuk query: + ```go + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + ``` + +3. **Validasi input user** sebelum membuat DynamicFilter + +4. **Gunakan prepared statements** (sudah otomatis di QueryBuilder) + +5. **Log query** untuk debugging: + ```go + qb.SetQueryLogging(true) + ``` + +## Kontribusi + +Pull requests are welcome! For major changes, please open an issue first to discuss what you would like to change. + +## License + +MIT License \ No newline at end of file diff --git a/internal/utils/query/builder.go b/internal/utils/query/builder.go new file mode 100644 index 0000000..37a9482 --- /dev/null +++ b/internal/utils/query/builder.go @@ -0,0 +1,2728 @@ +package utils + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "strings" + "time" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +// DBType represents the type of database +type DBType string + +const ( + DBTypePostgreSQL DBType = "postgres" + DBTypeMySQL DBType = "mysql" + DBTypeSQLite DBType = "sqlite" + DBTypeSQLServer DBType = "sqlserver" + DBTypeMongoDB DBType = "mongodb" +) + +// FilterOperator represents supported filter operators +type FilterOperator string + +const ( + OpEqual FilterOperator = "_eq" + OpNotEqual FilterOperator = "_neq" + OpLike FilterOperator = "_like" + OpILike FilterOperator = "_ilike" + OpNotLike FilterOperator = "_nlike" + OpNotILike FilterOperator = "_nilike" + OpIn FilterOperator = "_in" + OpNotIn FilterOperator = "_nin" + OpGreaterThan FilterOperator = "_gt" + OpGreaterThanEqual FilterOperator = "_gte" + OpLessThan FilterOperator = "_lt" + OpLessThanEqual FilterOperator = "_lte" + OpBetween FilterOperator = "_between" + OpNotBetween FilterOperator = "_nbetween" + OpNull FilterOperator = "_null" + OpNotNull FilterOperator = "_nnull" + OpContains FilterOperator = "_contains" + OpNotContains FilterOperator = "_ncontains" + OpStartsWith FilterOperator = "_starts_with" + OpEndsWith FilterOperator = "_ends_with" + OpJsonContains FilterOperator = "_json_contains" + OpJsonNotContains FilterOperator = "_json_ncontains" + OpJsonExists FilterOperator = "_json_exists" + OpJsonNotExists FilterOperator = "_json_nexists" + OpJsonEqual FilterOperator = "_json_eq" + OpJsonNotEqual FilterOperator = "_json_neq" + 
OpArrayContains FilterOperator = "_array_contains" + OpArrayNotContains FilterOperator = "_array_ncontains" + OpArrayLength FilterOperator = "_array_length" +) + +// DynamicFilter represents a single filter condition +type DynamicFilter struct { + Column string `json:"column"` + Operator FilterOperator `json:"operator"` + Value interface{} `json:"value"` + // Additional options for complex filters + Options map[string]interface{} `json:"options,omitempty"` +} + +// FilterGroup represents a group of filters with a logical operator (AND/OR) +type FilterGroup struct { + Filters []DynamicFilter `json:"filters"` + LogicOp string `json:"logic_op"` // AND, OR +} + +// SelectField represents a field in the SELECT clause, supporting expressions and aliases +type SelectField struct { + Expression string `json:"expression"` // e.g., "TMLogBarang.Nama", "COUNT(*)" + Alias string `json:"alias"` // e.g., "obat_nama", "total_count" + // Window function support + WindowFunction *WindowFunction `json:"window_function,omitempty"` +} + +// WindowFunction represents a window function with its configuration +type WindowFunction struct { + Function string `json:"function"` // e.g., "ROW_NUMBER", "RANK", "DENSE_RANK", "LEAD", "LAG" + Over string `json:"over"` // PARTITION BY expression + OrderBy string `json:"order_by"` // ORDER BY expression + Frame string `json:"frame"` // ROWS/RANGE clause + Alias string `json:"alias"` // Alias for the window function +} + +// Join represents a JOIN clause +type Join struct { + Type string `json:"type"` // "INNER", "LEFT", "RIGHT", "FULL" + Table string `json:"table"` // Table name to join + Alias string `json:"alias"` // Table alias + OnConditions FilterGroup `json:"on_conditions"` // Conditions for the ON clause + // LATERAL JOIN support + Lateral bool `json:"lateral,omitempty"` +} + +// Union represents a UNION clause +type Union struct { + Type string `json:"type"` // "UNION", "UNION ALL" + Query DynamicQuery `json:"query"` // The subquery to union with +} + +// CTE (Common Table Expression) represents a WITH clause +type CTE struct { + Name string `json:"name"` // CTE alias name + Query DynamicQuery `json:"query"` // The query defining the CTE + // Recursive CTE support + Recursive bool `json:"recursive,omitempty"` +} + +// DynamicQuery represents the complete query structure +type DynamicQuery struct { + Fields []SelectField `json:"fields,omitempty"` + From string `json:"from"` // Main table name + Aliases string `json:"aliases"` // Main table alias + Joins []Join `json:"joins,omitempty"` + Filters []FilterGroup `json:"filters,omitempty"` + GroupBy []string `json:"group_by,omitempty"` + Having []FilterGroup `json:"having,omitempty"` + Unions []Union `json:"unions,omitempty"` + CTEs []CTE `json:"ctes,omitempty"` + Sort []SortField `json:"sort,omitempty"` + Limit int `json:"limit"` + Offset int `json:"offset"` + // Window function support + WindowFunctions []WindowFunction `json:"window_functions,omitempty"` + // JSON operations + JsonOperations []JsonOperation `json:"json_operations,omitempty"` +} + +// JsonOperation represents a JSON operation +type JsonOperation struct { + Type string `json:"type"` // "extract", "exists", "contains", etc. 
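+	// Only the Type values handled by buildJsonOperation ("extract", "exists", "contains")
+	// are currently supported; any other value returns an error.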
+ Column string `json:"column"` // JSON column + Path string `json:"path"` // JSON path + Value interface{} `json:"value,omitempty"` // Value for comparison + Alias string `json:"alias,omitempty"` // Alias for the result +} + +// SortField represents sorting configuration +type SortField struct { + Column string `json:"column"` + Order string `json:"order"` // ASC, DESC +} + +// UpdateData represents data for UPDATE operations +type UpdateData struct { + Columns []string `json:"columns"` + Values []interface{} `json:"values"` + // JSON update support + JsonUpdates map[string]JsonUpdate `json:"json_updates,omitempty"` +} + +// JsonUpdate represents a JSON update operation +type JsonUpdate struct { + Path string `json:"path"` // JSON path + Value interface{} `json:"value"` // New value +} + +// InsertData represents data for INSERT operations +type InsertData struct { + Columns []string `json:"columns"` + Values []interface{} `json:"values"` + // JSON insert support + JsonValues map[string]interface{} `json:"json_values,omitempty"` +} + +// QueryBuilder builds SQL queries from dynamic filters using squirrel +type QueryBuilder struct { + dbType DBType + sqlBuilder squirrel.StatementBuilderType + allowedColumns map[string]bool // Security: only allow specified columns + allowedTables map[string]bool // Security: only allow specified tables + // Security settings + enableSecurityChecks bool + maxAllowedRows int + // SQL injection prevention patterns + dangerousPatterns []*regexp.Regexp + // Query logging + enableQueryLogging bool + // Connection timeout settings + queryTimeout time.Duration +} + +// NewQueryBuilder creates a new query builder instance for a specific database type +func NewQueryBuilder(dbType DBType) *QueryBuilder { + var placeholderFormat squirrel.PlaceholderFormat + + switch dbType { + case DBTypePostgreSQL: + placeholderFormat = squirrel.Dollar + case DBTypeMySQL, DBTypeSQLite: + placeholderFormat = squirrel.Question + case DBTypeSQLServer: + placeholderFormat = squirrel.AtP + default: + placeholderFormat = squirrel.Question + } + + // Initialize dangerous patterns for SQL injection prevention + dangerousPatterns := []*regexp.Regexp{ + regexp.MustCompile(`(?i)(union|select|insert|update|delete|drop|alter|create|exec|execute)\s`), + regexp.MustCompile(`(?i)(--|\/\*|\*\/)`), + regexp.MustCompile(`(?i)(or|and)\s+1\s*=\s*1`), + regexp.MustCompile(`(?i)(or|and)\s+true`), + regexp.MustCompile(`(?i)(xp_|sp_)\w+`), // SQL Server extended procedures + regexp.MustCompile(`(?i)(waitfor\s+delay)`), // SQL Server time-based attack + regexp.MustCompile(`(?i)(benchmark|sleep)\s*\(`), // MySQL time-based attack + regexp.MustCompile(`(?i)(pg_sleep)\s*\(`), // PostgreSQL time-based attack + regexp.MustCompile(`(?i)(load_file|into\s+outfile)`), // File operations + regexp.MustCompile(`(?i)(information_schema|sysobjects|syscolumns)`), // System tables + } + + return &QueryBuilder{ + dbType: dbType, + sqlBuilder: squirrel.StatementBuilder.PlaceholderFormat(placeholderFormat), + allowedColumns: make(map[string]bool), + allowedTables: make(map[string]bool), + enableSecurityChecks: true, + maxAllowedRows: 10000, + dangerousPatterns: dangerousPatterns, + enableQueryLogging: true, + queryTimeout: 30 * time.Second, + } +} + +// SetSecurityOptions configures security settings +func (qb *QueryBuilder) SetSecurityOptions(enableChecks bool, maxRows int) *QueryBuilder { + qb.enableSecurityChecks = enableChecks + qb.maxAllowedRows = maxRows + return qb +} + +// SetAllowedColumns sets the list of allowed 
columns for security +func (qb *QueryBuilder) SetAllowedColumns(columns []string) *QueryBuilder { + qb.allowedColumns = make(map[string]bool) + for _, col := range columns { + qb.allowedColumns[col] = true + } + return qb +} + +// SetAllowedTables sets the list of allowed tables for security +func (qb *QueryBuilder) SetAllowedTables(tables []string) *QueryBuilder { + qb.allowedTables = make(map[string]bool) + for _, table := range tables { + qb.allowedTables[table] = true + } + return qb +} + +// SetQueryLogging enables or disables query logging +func (qb *QueryBuilder) SetQueryLogging(enable bool) *QueryBuilder { + qb.enableQueryLogging = enable + return qb +} + +// SetQueryTimeout sets the default query timeout +func (qb *QueryBuilder) SetQueryTimeout(timeout time.Duration) *QueryBuilder { + qb.queryTimeout = timeout + return qb +} + +// BuildQuery builds the complete SQL SELECT query with support for CTEs, JOINs, and UNIONs +func (qb *QueryBuilder) BuildQuery(query DynamicQuery) (string, []interface{}, error) { + var allArgs []interface{} + var queryParts []string + + // Security check for limit + if qb.enableSecurityChecks && query.Limit > qb.maxAllowedRows { + return "", nil, fmt.Errorf("requested limit %d exceeds maximum allowed %d", query.Limit, qb.maxAllowedRows) + } + + // Security check for table name + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[query.From] { + return "", nil, fmt.Errorf("disallowed table: %s", query.From) + } + + // 1. Build CTEs (WITH clause) + if len(query.CTEs) > 0 { + cteClause, cteArgs, err := qb.buildCTEClause(query.CTEs) + if err != nil { + return "", nil, err + } + queryParts = append(queryParts, cteClause) + allArgs = append(allArgs, cteArgs...) + } + + // 2. Build Main Query using Squirrel's From and Join methods + fromClause := qb.buildFromClause(query.From, query.Aliases) + selectFields := qb.buildSelectFields(query.Fields) + + // Start building the main query + var mainQuery squirrel.SelectBuilder + if len(query.WindowFunctions) > 0 || len(query.JsonOperations) > 0 { + // We need to add window functions and JSON operations after initial select + mainQuery = qb.sqlBuilder.Select(selectFields...).From(fromClause) + } else { + mainQuery = qb.sqlBuilder.Select(selectFields...).From(fromClause) + } + + // Add JOINs using Squirrel's Join method + if len(query.Joins) > 0 { + for _, join := range query.Joins { + // Security check for joined table + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[join.Table] { + return "", nil, fmt.Errorf("disallowed table in join: %s", join.Table) + } + + joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join) + if err != nil { + return "", nil, err + } + joinStr := tableWithAlias + " ON " + onClause + switch strings.ToUpper(joinType) { + case "LEFT": + if join.Lateral { + mainQuery = mainQuery.LeftJoin("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.LeftJoin(joinStr, joinArgs...) + } + case "RIGHT": + mainQuery = mainQuery.RightJoin(joinStr, joinArgs...) + case "FULL": + mainQuery = mainQuery.Join("FULL JOIN "+joinStr, joinArgs...) + default: + if join.Lateral { + mainQuery = mainQuery.Join("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.Join(joinStr, joinArgs...) + } + } + } + } + + // 4. 
Apply WHERE conditions + if len(query.Filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Where(whereClause, whereArgs...) + } + + // 5. Apply GROUP BY + if len(query.GroupBy) > 0 { + mainQuery = mainQuery.GroupBy(qb.buildGroupByColumns(query.GroupBy)...) + } + + // 6. Apply HAVING conditions + if len(query.Having) > 0 { + havingClause, havingArgs, err := qb.BuildWhereClause(query.Having) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Having(havingClause, havingArgs...) + } + + // 7. Apply ORDER BY + if len(query.Sort) > 0 { + for _, sort := range query.Sort { + column := qb.validateAndEscapeColumn(sort.Column) + if column == "" { + continue + } + order := "ASC" + if strings.ToUpper(sort.Order) == "DESC" { + order = "DESC" + } + mainQuery = mainQuery.OrderBy(fmt.Sprintf("%s %s", column, order)) + } + } + + // 8. Apply window functions and JSON operations by modifying the SELECT clause + if len(query.WindowFunctions) > 0 || len(query.JsonOperations) > 0 { + // We need to rebuild the SELECT clause with window functions and JSON operations + var finalSelectFields []string + finalSelectFields = append(finalSelectFields, selectFields...) + + // Add window functions + for _, wf := range query.WindowFunctions { + windowFunc, err := qb.buildWindowFunction(wf) + if err != nil { + return "", nil, err + } + finalSelectFields = append(finalSelectFields, windowFunc) + } + + // Add JSON operations + for _, jo := range query.JsonOperations { + jsonExpr, jsonArgs, err := qb.buildJsonOperation(jo) + if err != nil { + return "", nil, err + } + if jo.Alias != "" { + jsonExpr += " AS " + qb.escapeIdentifier(jo.Alias) + } + finalSelectFields = append(finalSelectFields, jsonExpr) + allArgs = append(allArgs, jsonArgs...) + } + + // Rebuild the query with the complete SELECT clause + mainQuery = qb.sqlBuilder.Select(finalSelectFields...).From(fromClause) + + // Re-apply all the other clauses + if len(query.Joins) > 0 { + for _, join := range query.Joins { + // Security check for joined table + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[join.Table] { + return "", nil, fmt.Errorf("disallowed table in join: %s", join.Table) + } + + joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join) + if err != nil { + return "", nil, err + } + joinStr := tableWithAlias + " ON " + onClause + switch strings.ToUpper(joinType) { + case "LEFT": + if join.Lateral { + mainQuery = mainQuery.LeftJoin("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.LeftJoin(joinStr, joinArgs...) + } + case "RIGHT": + mainQuery = mainQuery.RightJoin(joinStr, joinArgs...) + case "FULL": + mainQuery = mainQuery.Join("FULL JOIN "+joinStr, joinArgs...) + default: + if join.Lateral { + mainQuery = mainQuery.Join("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.Join(joinStr, joinArgs...) + } + } + } + } + + if len(query.Filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Where(whereClause, whereArgs...) + } + + if len(query.GroupBy) > 0 { + mainQuery = mainQuery.GroupBy(qb.buildGroupByColumns(query.GroupBy)...) + } + + if len(query.Having) > 0 { + havingClause, havingArgs, err := qb.BuildWhereClause(query.Having) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Having(havingClause, havingArgs...) 
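+			// HAVING reuses BuildWhereClause, so the same filter operators are
+			// available for aggregate conditions as for ordinary WHERE filters.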
+ } + + if len(query.Sort) > 0 { + for _, sort := range query.Sort { + column := qb.validateAndEscapeColumn(sort.Column) + if column == "" { + continue + } + order := "ASC" + if strings.ToUpper(sort.Order) == "DESC" { + order = "DESC" + } + mainQuery = mainQuery.OrderBy(fmt.Sprintf("%s %s", column, order)) + } + } + } + + // 9. Apply pagination with dialect-specific syntax + if query.Limit > 0 { + if qb.dbType == DBTypeSQLServer { + // SQL Server requires ORDER BY for OFFSET FETCH + if len(query.Sort) == 0 { + mainQuery = mainQuery.OrderBy("(SELECT 1)") + } + mainQuery = mainQuery.Suffix(fmt.Sprintf("OFFSET %d ROWS FETCH NEXT %d ROWS ONLY", query.Offset, query.Limit)) + } else { + mainQuery = mainQuery.Limit(uint64(query.Limit)) + if query.Offset > 0 { + mainQuery = mainQuery.Offset(uint64(query.Offset)) + } + } + } else if query.Offset > 0 && qb.dbType != DBTypeSQLServer { + mainQuery = mainQuery.Offset(uint64(query.Offset)) + } + + // Build final main query SQL + sql, args, err := mainQuery.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build main query: %w", err) + } + queryParts = append(queryParts, sql) + allArgs = append(allArgs, args...) + + // 10. Apply UNIONs + if len(query.Unions) > 0 { + unionClause, unionArgs, err := qb.buildUnionClause(query.Unions) + if err != nil { + return "", nil, err + } + queryParts = append(queryParts, unionClause) + allArgs = append(allArgs, unionArgs...) + } + + finalSQL := strings.Join(queryParts, " ") + + // Security check for dangerous patterns in user input values + if qb.enableSecurityChecks { + if err := qb.checkForSqlInjectionInArgs(allArgs); err != nil { + return "", nil, err + } + } + + // Security check for dangerous patterns in the final SQL + // if qb.enableSecurityChecks { + // if err := qb.checkForSqlInjectionInSQL(finalSQL); err != nil { + // return "", nil, err + // } + // } + + if qb.enableQueryLogging { + fmt.Printf("[DEBUG BuilderQuery] Final SQL query: %s\n", finalSQL) + fmt.Printf("[DEBUG] Query args: %v\n", allArgs) + } + return finalSQL, allArgs, nil +} + +// buildWindowFunction builds a window function expression +func (qb *QueryBuilder) buildWindowFunction(wf WindowFunction) (string, error) { + if !qb.isValidFunctionName(wf.Function) { + return "", fmt.Errorf("invalid window function name: %s", wf.Function) + } + + windowExpr := fmt.Sprintf("%s() OVER (", wf.Function) + + if wf.Over != "" { + windowExpr += fmt.Sprintf("PARTITION BY %s ", wf.Over) + } + + if wf.OrderBy != "" { + windowExpr += fmt.Sprintf("ORDER BY %s ", wf.OrderBy) + } + + if wf.Frame != "" { + windowExpr += wf.Frame + } + + windowExpr += ")" + + if wf.Alias != "" { + windowExpr += " AS " + qb.escapeIdentifier(wf.Alias) + } + + return windowExpr, nil +} + +// buildJsonOperation builds a JSON operation expression +func (qb *QueryBuilder) buildJsonOperation(jo JsonOperation) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(jo.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", jo.Column) + } + + path := jo.Path + if path == "" { + path = "$" + } + + var expr string + var args []interface{} + + switch strings.ToLower(jo.Type) { + case "extract": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s->>%s", column, qb.escapeJsonPath(path)) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s')", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = 
fmt.Sprintf("json_extract(%s, '%s')", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case "exists": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NOT NULL", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') IS NOT NULL", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case "contains": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s @> %s", column, "?") + args = append(args, jo.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, jo.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, jo.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path) + args = append(args, jo.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + default: + return "", nil, fmt.Errorf("unsupported JSON operation type: %s", jo.Type) + } + + return expr, args, nil +} + +// escapeJsonPath escapes a JSON path for PostgreSQL +func (qb *QueryBuilder) escapeJsonPath(path string) string { + // Simple implementation - in a real scenario, you'd need more sophisticated escaping + return "'" + strings.ReplaceAll(path, "'", "''") + "'" +} + +// escapeSqlServerJsonPath escapes a JSON path for SQL Server +func (qb *QueryBuilder) escapeSqlServerJsonPath(path string) string { + // Convert JSONPath to SQL Server format + // $.path.to.property -> '$.path.to.property' + if !strings.HasPrefix(path, "$") { + path = "$." + path + } + return strings.ReplaceAll(path, ".", ".") +} + +// buildCTEClause builds the WITH clause for Common Table Expressions +func (qb *QueryBuilder) buildCTEClause(ctes []CTE) (string, []interface{}, error) { + var cteParts []string + var allArgs []interface{} + + hasRecursive := false + for _, cte := range ctes { + if cte.Recursive { + hasRecursive = true + break + } + } + + withClause := "WITH" + if hasRecursive { + withClause = "WITH RECURSIVE" + } + + for _, cte := range ctes { + subQuery, args, err := qb.BuildQuery(cte.Query) + if err != nil { + return "", nil, fmt.Errorf("failed to build CTE '%s': %w", cte.Name, err) + } + cteParts = append(cteParts, fmt.Sprintf("%s AS (%s)", qb.escapeIdentifier(cte.Name), subQuery)) + allArgs = append(allArgs, args...) 
+ } + + return fmt.Sprintf("%s %s", withClause, strings.Join(cteParts, ", ")), allArgs, nil +} + +// buildFromClause builds the FROM clause with optional alias +func (qb *QueryBuilder) buildFromClause(table, alias string) string { + fromClause := qb.escapeIdentifier(table) + if alias != "" { + fromClause += " " + qb.escapeIdentifier(alias) + } + return fromClause +} + +// buildSingleJoinClause builds a single JOIN clause components +func (qb *QueryBuilder) buildSingleJoinClause(join Join) (string, string, string, []interface{}, error) { + joinType := strings.ToUpper(join.Type) + if joinType == "" { + joinType = "INNER" + } + + table := qb.escapeIdentifier(join.Table) + if join.Alias != "" { + table += " " + qb.escapeIdentifier(join.Alias) + } + + onClause, onArgs, err := qb.BuildWhereClause([]FilterGroup{join.OnConditions}) + if err != nil { + return "", "", "", nil, fmt.Errorf("failed to build ON clause for join on table %s: %w", join.Table, err) + } + + return joinType, table, onClause, onArgs, nil +} + +// buildUnionClause builds the UNION clause +func (qb *QueryBuilder) buildUnionClause(unions []Union) (string, []interface{}, error) { + var unionParts []string + var allArgs []interface{} + + for _, union := range unions { + subQuery, args, err := qb.BuildQuery(union.Query) + if err != nil { + return "", nil, fmt.Errorf("failed to build subquery for UNION: %w", err) + } + unionType := strings.ToUpper(union.Type) + if unionType == "" { + unionType = "UNION" + } + unionParts = append(unionParts, fmt.Sprintf("%s %s", unionType, subQuery)) + allArgs = append(allArgs, args...) + } + + return strings.Join(unionParts, " "), allArgs, nil +} + +// buildSelectFields builds the SELECT fields from SelectField structs +func (qb *QueryBuilder) buildSelectFields(fields []SelectField) []string { + if len(fields) == 0 { + return []string{"*"} + } + + var selectedFields []string + for _, field := range fields { + expr := field.Expression + if expr == "" { + continue + } + // Basic validation for expression + if !qb.isValidExpression(expr) { + continue + } + + // Handle window functions + if field.WindowFunction != nil { + windowFunc, err := qb.buildWindowFunction(*field.WindowFunction) + if err != nil { + continue + } + expr = windowFunc + } + + if field.Alias != "" { + selectedFields = append(selectedFields, fmt.Sprintf("%s AS %s", expr, qb.escapeIdentifier(field.Alias))) + } else { + selectedFields = append(selectedFields, expr) + } + } + + if len(selectedFields) == 0 { + return []string{"*"} + } + + return selectedFields +} + +// BuildWhereClause builds WHERE/HAVING conditions from FilterGroups +func (qb *QueryBuilder) BuildWhereClause(filterGroups []FilterGroup) (string, []interface{}, error) { + if len(filterGroups) == 0 { + return "", nil, nil + } + + var conditions []string + var allArgs []interface{} + + for i, group := range filterGroups { + if len(group.Filters) == 0 { + continue + } + + groupCondition, groupArgs, err := qb.buildFilterGroup(group) + if err != nil { + return "", nil, err + } + + if groupCondition != "" { + if i > 0 { + logicOp := "AND" + if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + conditions = append(conditions, logicOp) + } + conditions = append(conditions, fmt.Sprintf("(%s)", groupCondition)) + allArgs = append(allArgs, groupArgs...) 
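+			// Note: the operator that joins this group to the previous one is the current
+			// group's LogicOp (defaulting to AND), not the previous group's.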
+ } + } + + return strings.Join(conditions, " "), allArgs, nil +} + +// buildFilterGroup builds conditions for a single filter group +func (qb *QueryBuilder) buildFilterGroup(group FilterGroup) (string, []interface{}, error) { + var conditions []string + var args []interface{} + logicOp := "AND" + if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + + for i, filter := range group.Filters { + condition, filterArgs, err := qb.buildFilterCondition(filter) + if err != nil { + return "", nil, err + } + + if condition != "" { + if i > 0 { + conditions = append(conditions, logicOp) + } + conditions = append(conditions, condition) + args = append(args, filterArgs...) + } + } + + return strings.Join(conditions, " "), args, nil +} + +// buildFilterCondition builds a single filter condition with dialect-specific logic +func (qb *QueryBuilder) buildFilterCondition(filter DynamicFilter) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(filter.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column) + } + + // Handle column-to-column comparison + if valStr, ok := filter.Value.(string); ok && strings.Contains(valStr, ".") && qb.isValidExpression(valStr) && len(strings.Split(valStr, ".")) == 2 { + escapedVal := qb.escapeColumnReference(valStr) + switch filter.Operator { + case OpEqual: + return fmt.Sprintf("%s = %s", column, escapedVal), nil, nil + case OpNotEqual: + return fmt.Sprintf("%s <> %s", column, escapedVal), nil, nil + case OpGreaterThan: + return fmt.Sprintf("%s > %s", column, escapedVal), nil, nil + case OpLessThan: + return fmt.Sprintf("%s < %s", column, escapedVal), nil, nil + } + } + + // Handle JSON operations + switch filter.Operator { + case OpJsonContains, OpJsonNotContains, OpJsonExists, OpJsonNotExists, OpJsonEqual, OpJsonNotEqual: + return qb.buildJsonFilterCondition(filter) + case OpArrayContains, OpArrayNotContains, OpArrayLength: + return qb.buildArrayFilterCondition(filter) + } + + // Handle standard operators + switch filter.Operator { + case OpEqual: + if filter.Value == nil { + return fmt.Sprintf("%s IS NULL", column), nil, nil + } + return fmt.Sprintf("%s = ?", column), []interface{}{filter.Value}, nil + case OpNotEqual: + if filter.Value == nil { + return fmt.Sprintf("%s IS NOT NULL", column), nil, nil + } + return fmt.Sprintf("%s <> ?", column), []interface{}{filter.Value}, nil + case OpLike: + if filter.Value == nil { + return "", nil, nil + } + return fmt.Sprintf("%s LIKE ?", column), []interface{}{filter.Value}, nil + case OpILike: + if filter.Value == nil { + return "", nil, nil + } + switch qb.dbType { + case DBTypePostgreSQL, DBTypeSQLite: + return fmt.Sprintf("%s ILIKE ?", column), []interface{}{filter.Value}, nil + case DBTypeMySQL, DBTypeSQLServer: + return fmt.Sprintf("LOWER(%s) LIKE LOWER(?)", column), []interface{}{filter.Value}, nil + default: + return fmt.Sprintf("%s LIKE ?", column), []interface{}{filter.Value}, nil + } + case OpIn, OpNotIn: + values := qb.parseArrayValue(filter.Value) + if len(values) == 0 { + return "1=0", nil, nil + } + op := "IN" + if filter.Operator == OpNotIn { + op = "NOT IN" + } + placeholders := squirrel.Placeholders(len(values)) + return fmt.Sprintf("%s %s (%s)", column, op, placeholders), values, nil + case OpGreaterThan, OpGreaterThanEqual, OpLessThan, OpLessThanEqual: + if filter.Value == nil { + return "", nil, nil + } + op := strings.TrimPrefix(string(filter.Operator), "_") + return fmt.Sprintf("%s %s ?", column, op), 
[]interface{}{filter.Value}, nil + case OpBetween, OpNotBetween: + values := qb.parseArrayValue(filter.Value) + if len(values) != 2 { + return "", nil, fmt.Errorf("between operator requires exactly 2 values") + } + op := "BETWEEN" + if filter.Operator == OpNotBetween { + op = "NOT BETWEEN" + } + return fmt.Sprintf("%s %s ? AND ?", column, op), []interface{}{values[0], values[1]}, nil + case OpNull: + return fmt.Sprintf("%s IS NULL", column), nil, nil + case OpNotNull: + return fmt.Sprintf("%s IS NOT NULL", column), nil, nil + case OpContains, OpNotContains, OpStartsWith, OpEndsWith: + if filter.Value == nil { + return "", nil, nil + } + var value string + switch filter.Operator { + case OpContains, OpNotContains: + value = fmt.Sprintf("%%%v%%", filter.Value) + case OpStartsWith: + value = fmt.Sprintf("%v%%", filter.Value) + case OpEndsWith: + value = fmt.Sprintf("%%%v", filter.Value) + } + + switch qb.dbType { + case DBTypePostgreSQL, DBTypeSQLite: + op := "ILIKE" + if strings.Contains(string(filter.Operator), "Not") { + op = "NOT ILIKE" + } + return fmt.Sprintf("%s %s ?", column, op), []interface{}{value}, nil + case DBTypeMySQL, DBTypeSQLServer: + op := "LIKE" + if strings.Contains(string(filter.Operator), "Not") { + op = "NOT LIKE" + } + return fmt.Sprintf("LOWER(%s) %s LOWER(?)", column, op), []interface{}{value}, nil + default: + op := "LIKE" + if strings.Contains(string(filter.Operator), "Not") { + op = "NOT LIKE" + } + return fmt.Sprintf("%s %s ?", column, op), []interface{}{value}, nil + } + default: + return "", nil, fmt.Errorf("unsupported operator: %s", filter.Operator) + } +} + +// buildJsonFilterCondition builds a JSON filter condition +func (qb *QueryBuilder) buildJsonFilterCondition(filter DynamicFilter) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(filter.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column) + } + + path := "$" + if pathOption, ok := filter.Options["path"].(string); ok && pathOption != "" { + path = pathOption + } + + var expr string + var args []interface{} + + switch filter.Operator { + case OpJsonContains: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s @> ?", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonNotContains: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("NOT (%s @> ?)", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("NOT JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') <> ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') <> ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonExists: + switch qb.dbType { + case DBTypePostgreSQL: + expr = 
fmt.Sprintf("jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NOT NULL", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') IS NOT NULL", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonNotExists: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("NOT jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = fmt.Sprintf("NOT JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NULL", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') IS NULL", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonEqual: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s->>%s = ?", column, qb.escapeJsonPath(path)) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s') = ?", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonNotEqual: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s->>%s <> ?", column, qb.escapeJsonPath(path)) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s') <> ?", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') <> ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') <> ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + default: + return "", nil, fmt.Errorf("unsupported JSON operator: %s", filter.Operator) + } + + return expr, args, nil +} + +// buildArrayFilterCondition builds an array filter condition +func (qb *QueryBuilder) buildArrayFilterCondition(filter DynamicFilter) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(filter.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column) + } + + var expr string + var args []interface{} + + switch filter.Operator { + case OpArrayContains: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("? = ANY(%s)", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS(%s, JSON_QUOTE(?))", column) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("? 
IN (SELECT value FROM OPENJSON(%s))", column) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("EXISTS (SELECT 1 FROM json_each(%s) WHERE json_each.value = ?)", column) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("Array operations not supported for database type: %s", qb.dbType) + } + case OpArrayNotContains: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("? <> ALL(%s)", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("NOT JSON_CONTAINS(%s, JSON_QUOTE(?))", column) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("? NOT IN (SELECT value FROM OPENJSON(%s))", column) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("NOT EXISTS (SELECT 1 FROM json_each(%s) WHERE json_each.value = ?)", column) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("Array operations not supported for database type: %s", qb.dbType) + } + case OpArrayLength: + switch qb.dbType { + case DBTypePostgreSQL: + if lengthOption, ok := filter.Options["length"].(int); ok { + expr = fmt.Sprintf("array_length(%s, 1) = ?", column) + args = append(args, lengthOption) + } else { + return "", nil, fmt.Errorf("array_length operator requires 'length' option") + } + case DBTypeMySQL: + if lengthOption, ok := filter.Options["length"].(int); ok { + expr = fmt.Sprintf("JSON_LENGTH(%s) = ?", column) + args = append(args, lengthOption) + } else { + return "", nil, fmt.Errorf("array_length operator requires 'length' option") + } + case DBTypeSQLServer: + if lengthOption, ok := filter.Options["length"].(int); ok { + expr = fmt.Sprintf("(SELECT COUNT(*) FROM OPENJSON(%s)) = ?", column) + args = append(args, lengthOption) + } else { + return "", nil, fmt.Errorf("array_length operator requires 'length' option") + } + case DBTypeSQLite: + if lengthOption, ok := filter.Options["length"].(int); ok { + expr = fmt.Sprintf("json_array_length(%s) = ?", column) + args = append(args, lengthOption) + } else { + return "", nil, fmt.Errorf("array_length operator requires 'length' option") + } + default: + return "", nil, fmt.Errorf("Array operations not supported for database type: %s", qb.dbType) + } + default: + return "", nil, fmt.Errorf("unsupported array operator: %s", filter.Operator) + } + + return expr, args, nil +} + +// ============================================================================= +// SECTION 6: EXECUTION METHODS (NEW) +// Metode untuk mengeksekusi query langsung dengan logging performa. +// ============================================================================= + +func (qb *QueryBuilder) ExecuteQuery(ctx context.Context, db *sqlx.DB, query DynamicQuery, dest interface{}) error { + // sql, args, err := qb.BuildQuery(query) + // if err != nil { + // return err + // } + // start := time.Now() + // err = db.SelectContext(ctx, dest, sql, args...) 
+ // fmt.Printf("[DEBUG] Query executed in %v\n", time.Since(start)) + // return err + sql, args, err := qb.BuildQuery(query) + if err != nil { + return err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + + // Check if dest is a pointer to a slice of maps + destValue := reflect.ValueOf(dest) + if destValue.Kind() != reflect.Ptr || destValue.IsNil() { + return fmt.Errorf("dest must be a non-nil pointer") + } + + destElem := destValue.Elem() + if destElem.Kind() == reflect.Slice { + sliceType := destElem.Type().Elem() + if sliceType.Kind() == reflect.Map && + sliceType.Key().Kind() == reflect.String && + sliceType.Elem().Kind() == reflect.Interface { + + // Handle slice of map[string]interface{} + rows, err := db.QueryxContext(ctx, sql, args...) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + row := make(map[string]interface{}) + if err := rows.MapScan(row); err != nil { + return err + } + destElem.Set(reflect.Append(destElem, reflect.ValueOf(row))) + } + + fmt.Printf("[DEBUG] Query executed in %v\n", time.Since(start)) + return nil + } + } + + // Default case: use SelectContext + err = db.SelectContext(ctx, dest, sql, args...) + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Query executed in %v\n", time.Since(start)) + } + return err +} + +func (qb *QueryBuilder) ExecuteQueryRow(ctx context.Context, db *sqlx.DB, query DynamicQuery, dest interface{}) error { + sql, args, err := qb.BuildQuery(query) + if err != nil { + return err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + err = db.GetContext(ctx, dest, sql, args...) + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] QueryRow executed in %v\n", time.Since(start)) + } + return err +} + +func (qb *QueryBuilder) ExecuteCount(ctx context.Context, db *sqlx.DB, query DynamicQuery) (int64, error) { + sql, args, err := qb.BuildCountQuery(query) + if err != nil { + return 0, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + var count int64 + start := time.Now() + err = db.GetContext(ctx, &count, sql, args...) + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Count query executed in %v\n", time.Since(start)) + } + return count, err +} + +func (qb *QueryBuilder) ExecuteInsert(ctx context.Context, db *sqlx.DB, table string, data InsertData, returningColumns ...string) (sql.Result, error) { + // Security check for table name + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[table] { + return nil, fmt.Errorf("disallowed table: %s", table) + } + + sql, args, err := qb.BuildInsertQuery(table, data, returningColumns...) + if err != nil { + return nil, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) 
+ if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Insert query executed in %v\n", time.Since(start)) + } + return result, err +} + +func (qb *QueryBuilder) ExecuteUpdate(ctx context.Context, db *sqlx.DB, table string, updateData UpdateData, filters []FilterGroup, returningColumns ...string) (sql.Result, error) { + // Security check for table name + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[table] { + return nil, fmt.Errorf("disallowed table: %s", table) + } + + sql, args, err := qb.BuildUpdateQuery(table, updateData, filters, returningColumns...) + if err != nil { + return nil, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Update query executed in %v\n", time.Since(start)) + } + return result, err +} + +func (qb *QueryBuilder) ExecuteDelete(ctx context.Context, db *sqlx.DB, table string, filters []FilterGroup, returningColumns ...string) (sql.Result, error) { + // Security check for table name + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[table] { + return nil, fmt.Errorf("disallowed table: %s", table) + } + + sql, args, err := qb.BuildDeleteQuery(table, filters, returningColumns...) + if err != nil { + return nil, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Delete query executed in %v\n", time.Since(start)) + } + return result, err +} + +func (qb *QueryBuilder) ExecuteUpsert(ctx context.Context, db *sqlx.DB, table string, insertData InsertData, conflictColumns []string, updateColumns []string, returningColumns ...string) (sql.Result, error) { + // Security check for table name + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[table] { + return nil, fmt.Errorf("disallowed table: %s", table) + } + + sql, args, err := qb.BuildUpsertQuery(table, insertData, conflictColumns, updateColumns, returningColumns...) + if err != nil { + return nil, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && qb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, qb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) 
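+	// Note: BuildUpsertQuery only emits SQL for PostgreSQL (ON CONFLICT) and MySQL
+	// (ON DUPLICATE KEY UPDATE); for other dialects the builder call above has already
+	// returned an error before execution reaches this point.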
+ if qb.enableQueryLogging { + fmt.Printf("[DEBUG] Upsert query executed in %v\n", time.Since(start)) + } + return result, err +} + +// --- Helper and Validation Methods --- + +func (qb *QueryBuilder) buildGroupByColumns(fields []string) []string { + var groupCols []string + for _, field := range fields { + col := qb.validateAndEscapeColumn(field) + if col != "" { + groupCols = append(groupCols, col) + } + } + return groupCols +} + +func (qb *QueryBuilder) parseArrayValue(value interface{}) []interface{} { + if value == nil { + return nil + } + if reflect.TypeOf(value).Kind() == reflect.Slice { + v := reflect.ValueOf(value) + result := make([]interface{}, v.Len()) + for i := 0; i < v.Len(); i++ { + result[i] = v.Index(i).Interface() + } + return result + } + if str, ok := value.(string); ok { + if strings.Contains(str, ",") { + parts := strings.Split(str, ",") + result := make([]interface{}, len(parts)) + for i, part := range parts { + result[i] = strings.TrimSpace(part) + } + return result + } + return []interface{}{str} + } + return []interface{}{value} +} + +func (qb *QueryBuilder) validateAndEscapeColumn(field string) string { + if field == "" { + return "" + } + // Allow complex expressions like functions + if strings.Contains(field, "(") { + if qb.isValidExpression(field) { + return field // Don't escape complex expressions, assume they are safe + } + return "" + } + // Handle dotted column names like "table.column" + if strings.Contains(field, ".") { + if qb.isValidExpression(field) { + // Split on dot and escape each part + parts := strings.Split(field, ".") + var escapedParts []string + for _, part := range parts { + escapedParts = append(escapedParts, qb.escapeIdentifier(part)) + } + return strings.Join(escapedParts, ".") + } + return "" + } + // Simple column name + if qb.allowedColumns != nil && !qb.allowedColumns[field] { + return "" + } + return qb.escapeIdentifier(field) +} + +func (qb *QueryBuilder) isValidExpression(expr string) bool { + // This is a simplified check. A more robust solution might use a proper SQL parser library. + // For now, we allow alphanumeric, underscore, dots, parentheses, and common operators. + // For SQL Server, allow brackets [] and spaces for column names. 
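+	// Illustrative outcomes (identifiers hypothetical): "COUNT(p.id)" and "patients.full_name"
+	// pass, while "1; DROP TABLE users --" fails both the character whitelist and the
+	// dangerous-keyword check below.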
+ allowedChars := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.,() *-/[]" + for _, r := range expr { + if !strings.ContainsRune(allowedChars, r) { + return false + } + } + // Check for dangerous keywords + dangerousPatterns := []string{"--", "/*", "*/", "union", "select", "insert", "update", "delete", "drop", "alter", "create", "exec", "execute"} + lowerExpr := strings.ToLower(expr) + for _, pattern := range dangerousPatterns { + if strings.Contains(lowerExpr, pattern) { + return false + } + } + return true +} + +func (qb *QueryBuilder) isValidFunctionName(name string) bool { + // Check if the function name is a valid SQL function + validFunctions := map[string]bool{ + // Aggregate functions + "count": true, "sum": true, "avg": true, "min": true, "max": true, + // Window functions + "row_number": true, "rank": true, "dense_rank": true, "ntile": true, + "lag": true, "lead": true, "first_value": true, "last_value": true, + // JSON functions + "json_extract": true, "json_contains": true, "json_search": true, + "json_array": true, "json_object": true, "json_merge": true, + // Other functions + "concat": true, "substring": true, "upper": true, "lower": true, + "trim": true, "coalesce": true, "nullif": true, "isnull": true, + } + + return validFunctions[strings.ToLower(name)] +} + +func (qb *QueryBuilder) escapeColumnReference(col string) string { + parts := strings.Split(col, ".") + var escaped []string + for _, p := range parts { + if strings.HasPrefix(p, "[") && strings.HasSuffix(p, "]") { + escaped = append(escaped, p) + } else { + escaped = append(escaped, qb.escapeIdentifier(p)) + } + } + return strings.Join(escaped, ".") +} + +func (qb *QueryBuilder) escapeIdentifier(col string) string { + switch qb.dbType { + case DBTypePostgreSQL, DBTypeSQLite: + return fmt.Sprintf("\"%s\"", strings.ReplaceAll(col, "\"", "\"\"")) + case DBTypeMySQL: + return fmt.Sprintf("`%s`", strings.ReplaceAll(col, "`", "``")) + case DBTypeSQLServer: + return fmt.Sprintf("[%s]", strings.ReplaceAll(col, "]", "]]")) + default: + return col + } +} + +// checkForSqlInjectionInArgs checks for potential SQL injection patterns in query arguments +func (qb *QueryBuilder) checkForSqlInjectionInArgs(args []interface{}) error { + if !qb.enableSecurityChecks { + return nil + } + + for _, arg := range args { + if str, ok := arg.(string); ok { + lowerStr := strings.ToLower(str) + // Check for dangerous patterns specifically in user input values + dangerousPatterns := []*regexp.Regexp{ + regexp.MustCompile(`(?i)(union\s+select)`), + regexp.MustCompile(`(?i)(or\s+1\s*=\s*1)`), + regexp.MustCompile(`(?i)(and\s+true)`), + regexp.MustCompile(`(?i)(waitfor\s+delay)`), + regexp.MustCompile(`(?i)(benchmark|sleep)\s*\(`), + regexp.MustCompile(`(?i)(pg_sleep)\s*\(`), + regexp.MustCompile(`(?i)(load_file|into\s+outfile)`), + regexp.MustCompile(`(?i)(information_schema|sysobjects|syscolumns)`), + regexp.MustCompile(`(?i)(--|\/\*|\*\/)`), + } + + for _, pattern := range dangerousPatterns { + if pattern.MatchString(lowerStr) { + return fmt.Errorf("potential SQL injection detected in query argument: pattern %s matched", pattern.String()) + } + } + } + } + return nil +} + +// checkForSqlInjectionInSQL checks for potential SQL injection patterns in the final SQL +func (qb *QueryBuilder) checkForSqlInjectionInSQL(sql string) error { + if !qb.enableSecurityChecks { + return nil + } + + // Check for dangerous patterns in the final SQL + // But allow valid SQL keywords in their proper context + lowerSQL := strings.ToLower(sql) + + 
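+	// The patterns below already carry the (?i) flag, so matching them against lowerSQL
+	// is redundant but harmless.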
// More specific patterns that actually indicate injection attempts + dangerousPatterns := []*regexp.Regexp{ + regexp.MustCompile(`(?i)(union\s+select)`), // UNION followed by SELECT + regexp.MustCompile(`(?i)(select\s+.*\s+from\s+.*\s+where\s+.*\s+or\s+1\s*=\s*1)`), // Classic SQL injection + regexp.MustCompile(`(?i)(drop\s+table)`), // DROP TABLE + regexp.MustCompile(`(?i)(delete\s+from)`), // DELETE FROM + regexp.MustCompile(`(?i)(insert\s+into)`), // INSERT INTO + regexp.MustCompile(`(?i)(update\s+.*\s+set)`), // UPDATE SET + regexp.MustCompile(`(?i)(alter\s+table)`), // ALTER TABLE + regexp.MustCompile(`(?i)(create\s+table)`), // CREATE TABLE + regexp.MustCompile(`(?i)(exec\s*\(|execute\s*\()`), // EXEC/EXECUTE functions + regexp.MustCompile(`(?i)(--|\/\*|\*\/)`), // SQL comments + } + + for _, pattern := range dangerousPatterns { + if pattern.MatchString(lowerSQL) { + return fmt.Errorf("potential SQL injection detected in SQL: pattern %s matched", pattern.String()) + } + } + + return nil +} + +// --- Other Query Builders (Insert, Update, Delete, Upsert, Count) --- + +// BuildCountQuery builds a count query +func (qb *QueryBuilder) BuildCountQuery(query DynamicQuery) (string, []interface{}, error) { + // For a count query, we don't need fields, joins, or unions. + // We only need FROM, WHERE, GROUP BY, HAVING. + countQuery := DynamicQuery{ + From: query.From, + Aliases: query.Aliases, + Filters: query.Filters, + GroupBy: query.GroupBy, + Having: query.Having, + // Joins are important for count with filters on joined tables + Joins: query.Joins, + } + + // Build the base query for the count using Squirrel's From and Join methods + fromClause := qb.buildFromClause(countQuery.From, countQuery.Aliases) + baseQuery := qb.sqlBuilder.Select("COUNT(*)").From(fromClause) + + // Add JOINs using Squirrel's Join method + if len(countQuery.Joins) > 0 { + for _, join := range countQuery.Joins { + // Security check for joined table + if qb.enableSecurityChecks && len(qb.allowedTables) > 0 && !qb.allowedTables[join.Table] { + return "", nil, fmt.Errorf("disallowed table in join: %s", join.Table) + } + + joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join) + if err != nil { + return "", nil, err + } + joinStr := tableWithAlias + " ON " + onClause + switch strings.ToUpper(joinType) { + case "LEFT": + baseQuery = baseQuery.LeftJoin(joinStr, joinArgs...) + case "RIGHT": + baseQuery = baseQuery.RightJoin(joinStr, joinArgs...) + case "FULL": + baseQuery = baseQuery.Join("FULL JOIN "+joinStr, joinArgs...) + default: + baseQuery = baseQuery.Join(joinStr, joinArgs...) + } + } + } + + if len(countQuery.Filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(countQuery.Filters) + if err != nil { + return "", nil, err + } + baseQuery = baseQuery.Where(whereClause, whereArgs...) + } + + if len(countQuery.GroupBy) > 0 { + baseQuery = baseQuery.GroupBy(qb.buildGroupByColumns(countQuery.GroupBy)...) + } + + if len(countQuery.Having) > 0 { + havingClause, havingArgs, err := qb.BuildWhereClause(countQuery.Having) + if err != nil { + return "", nil, err + } + baseQuery = baseQuery.Having(havingClause, havingArgs...) 
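+		// Note: when GroupBy is present this statement returns one COUNT(*) row per group
+		// rather than a single total, so callers needing the number of groups (or the grand
+		// total) should wrap or post-process the result.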
+ } + + sql, args, err := baseQuery.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build COUNT query: %w", err) + } + + if qb.enableQueryLogging { + fmt.Printf("[DEBUG] COUNT SQL query: %s\n", sql) + fmt.Printf("[DEBUG] COUNT query args: %v\n", args) + } + return sql, args, nil +} + +// BuildInsertQuery builds an INSERT query +func (qb *QueryBuilder) BuildInsertQuery(table string, data InsertData, returningColumns ...string) (string, []interface{}, error) { + // Validate columns + for _, col := range data.Columns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + + // Start with basic insert + insert := qb.sqlBuilder.Insert(table).Columns(data.Columns...).Values(data.Values...) + + // Handle JSON values - we need to modify the insert statement + if len(data.JsonValues) > 0 { + // Create a new insert builder with all columns including JSON columns + allColumns := make([]string, len(data.Columns)) + copy(allColumns, data.Columns) + + allValues := make([]interface{}, len(data.Values)) + copy(allValues, data.Values) + + for col, val := range data.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert = qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) + } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + insert = insert.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build INSERT query: %w", err) + } + + return sql, args, nil +} + +// BuildUpdateQuery builds an UPDATE query +func (qb *QueryBuilder) BuildUpdateQuery(table string, updateData UpdateData, filters []FilterGroup, returningColumns ...string) (string, []interface{}, error) { + // Validate columns + for _, col := range updateData.Columns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + + // Start with basic update + update := qb.sqlBuilder.Update(table).SetMap(qb.buildSetMap(updateData)) + + // Handle JSON updates - we need to modify the update statement + if len(updateData.JsonUpdates) > 0 { + // Create a new set map including JSON updates + setMap := qb.buildSetMap(updateData) + + for col, jsonUpdate := range updateData.JsonUpdates { + switch qb.dbType { + case DBTypePostgreSQL: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // Use jsonb_set function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("jsonb_set(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeMySQL: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // Use JSON_SET function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("JSON_SET(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeSQLServer: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value 
for column %s: %w", col, err) + } + // Use JSON_MODIFY function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("JSON_MODIFY(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeSQLite: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // SQLite doesn't have a built-in JSON_SET function, so we need to use json_patch + setMap[col] = squirrel.Expr(fmt.Sprintf("json_patch(%s, ?)", qb.escapeIdentifier(col)), jsonVal) + } + } + + update = qb.sqlBuilder.Update(table).SetMap(setMap) + } + + if len(filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(filters) + if err != nil { + return "", nil, err + } + update = update.Where(whereClause, whereArgs...) + } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + update = update.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := update.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPDATE query: %w", err) + } + + return sql, args, nil +} + +// buildSetMap builds a map for SetMap from UpdateData +func (qb *QueryBuilder) buildSetMap(updateData UpdateData) map[string]interface{} { + setMap := make(map[string]interface{}) + for i, col := range updateData.Columns { + setMap[col] = updateData.Values[i] + } + return setMap +} + +// BuildDeleteQuery builds a DELETE query +func (qb *QueryBuilder) BuildDeleteQuery(table string, filters []FilterGroup, returningColumns ...string) (string, []interface{}, error) { + delete := qb.sqlBuilder.Delete(table) + + if len(filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(filters) + if err != nil { + return "", nil, err + } + delete = delete.Where(whereClause, whereArgs...) 
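+		// Note: when filters is empty this WHERE clause is skipped and the generated
+		// statement deletes every row in the table, so callers should normally require at
+		// least one filter group.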
+ } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + delete = delete.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := delete.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build DELETE query: %w", err) + } + + return sql, args, nil +} + +// BuildUpsertQuery builds an UPSERT query +func (qb *QueryBuilder) BuildUpsertQuery(table string, insertData InsertData, conflictColumns []string, updateColumns []string, returningColumns ...string) (string, []interface{}, error) { + // Validate columns + for _, col := range insertData.Columns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + for _, col := range updateColumns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + + switch qb.dbType { + case DBTypePostgreSQL: + // Handle JSON values for PostgreSQL + allColumns := make([]string, len(insertData.Columns)) + copy(allColumns, insertData.Columns) + + allValues := make([]interface{}, len(insertData.Values)) + copy(allValues, insertData.Values) + + for col, val := range insertData.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert := qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) + if len(conflictColumns) > 0 { + conflictTarget := strings.Join(conflictColumns, ", ") + setClause := "" + for _, col := range updateColumns { + if setClause != "" { + setClause += ", " + } + setClause += fmt.Sprintf("%s = EXCLUDED.%s", qb.escapeIdentifier(col), qb.escapeIdentifier(col)) + } + insert = insert.Suffix(fmt.Sprintf("ON CONFLICT (%s) DO UPDATE SET %s", conflictTarget, setClause)) + } + if len(returningColumns) > 0 { + insert = insert.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPSERT query: %w", err) + } + return sql, args, nil + case DBTypeMySQL: + // Handle JSON values for MySQL + allColumns := make([]string, len(insertData.Columns)) + copy(allColumns, insertData.Columns) + + allValues := make([]interface{}, len(insertData.Values)) + copy(allValues, insertData.Values) + + for col, val := range insertData.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert := qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) 
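+		// Note: on this MySQL path the conflict target is whatever unique or primary keys
+		// the table defines, so conflictColumns is not used here, and returningColumns is
+		// ignored because no RETURNING suffix is added.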
+ if len(updateColumns) > 0 { + setClause := "" + for _, col := range updateColumns { + if setClause != "" { + setClause += ", " + } + setClause += fmt.Sprintf("%s = VALUES(%s)", qb.escapeIdentifier(col), qb.escapeIdentifier(col)) + } + insert = insert.Suffix(fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", setClause)) + } + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPSERT query: %w", err) + } + return sql, args, nil + default: + return "", nil, fmt.Errorf("UPSERT not supported for database type: %s", qb.dbType) + } +} + +// --- QueryParser (for parsing URL query strings) --- + +type QueryParser struct { + defaultLimit int + maxLimit int +} + +func NewQueryParser() *QueryParser { + return &QueryParser{defaultLimit: 10, maxLimit: 100} +} + +func (qp *QueryParser) SetLimits(defaultLimit, maxLimit int) *QueryParser { + qp.defaultLimit = defaultLimit + qp.maxLimit = maxLimit + return qp +} + +// ParseQuery parses URL query parameters into a DynamicQuery struct. +func (qp *QueryParser) ParseQuery(values url.Values, defaultTable string) (DynamicQuery, error) { + query := DynamicQuery{ + From: defaultTable, + Limit: qp.defaultLimit, + Offset: 0, + } + + // Parse fields + if fields := values.Get("fields"); fields != "" { + if fields == "*" { + query.Fields = []SelectField{{Expression: "*"}} + } else { + fieldList := strings.Split(fields, ",") + for _, field := range fieldList { + query.Fields = append(query.Fields, SelectField{Expression: strings.TrimSpace(field)}) + } + } + } else { + query.Fields = []SelectField{{Expression: "*"}} + } + + // Parse pagination + if limit := values.Get("limit"); limit != "" { + if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= qp.maxLimit { + query.Limit = l + } + } + if offset := values.Get("offset"); offset != "" { + if o, err := strconv.Atoi(offset); err == nil && o >= 0 { + query.Offset = o + } + } + + // Parse filters + filters, err := qp.parseFilters(values) + if err != nil { + return query, err + } + query.Filters = filters + + // Parse sorting + sorts, err := qp.parseSorting(values) + if err != nil { + return query, err + } + query.Sort = sorts + + return query, nil +} + +func (qp *QueryParser) parseFilters(values url.Values) ([]FilterGroup, error) { + filterMap := make(map[string]map[string]string) + for key, vals := range values { + if strings.HasPrefix(key, "filter[") && strings.HasSuffix(key, "]") { + parts := strings.Split(key[7:len(key)-1], "][") + if len(parts) == 2 { + column, operator := parts[0], parts[1] + if filterMap[column] == nil { + filterMap[column] = make(map[string]string) + } + if len(vals) > 0 { + filterMap[column][operator] = vals[0] + } + } + } + } + if len(filterMap) == 0 { + return nil, nil + } + var filters []DynamicFilter + for column, operators := range filterMap { + for opStr, value := range operators { + operator := FilterOperator(opStr) + var parsedValue interface{} + switch operator { + case OpIn, OpNotIn: + if value != "" { + parsedValue = strings.Split(value, ",") + } + case OpBetween, OpNotBetween: + if value != "" { + parts := strings.Split(value, ",") + if len(parts) == 2 { + parsedValue = []interface{}{strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])} + } + } + case OpNull, OpNotNull: + parsedValue = nil + default: + parsedValue = value + } + filters = append(filters, DynamicFilter{Column: column, Operator: operator, Value: parsedValue}) + } + } + if len(filters) == 0 { + return nil, nil + } + return []FilterGroup{{Filters: filters, LogicOp: "AND"}}, 
nil +} + +func (qp *QueryParser) parseSorting(values url.Values) ([]SortField, error) { + sortParam := values.Get("sort") + if sortParam == "" { + return nil, nil + } + var sorts []SortField + fields := strings.Split(sortParam, ",") + for _, field := range fields { + field = strings.TrimSpace(field) + if field == "" { + continue + } + order, column := "ASC", field + if strings.HasPrefix(field, "-") { + order = "DESC" + column = field[1:] + } else if strings.HasPrefix(field, "+") { + column = field[1:] + } + sorts = append(sorts, SortField{Column: column, Order: order}) + } + return sorts, nil +} + +// ParseQueryWithDefaultFields parses URL query parameters into a DynamicQuery struct with default fields. +func (qp *QueryParser) ParseQueryWithDefaultFields(values url.Values, defaultTable string, defaultFields []string) (DynamicQuery, error) { + query, err := qp.ParseQuery(values, defaultTable) + if err != nil { + return query, err + } + + // If no fields specified, use default fields + if len(query.Fields) == 0 || (len(query.Fields) == 1 && query.Fields[0].Expression == "*") { + query.Fields = make([]SelectField, len(defaultFields)) + for i, field := range defaultFields { + query.Fields[i] = SelectField{Expression: field} + } + } + + return query, nil +} + +// ============================================================================= +// MONGODB QUERY BUILDER +// ============================================================================= + +// MongoQueryBuilder builds MongoDB queries from dynamic filters +type MongoQueryBuilder struct { + allowedFields map[string]bool // Security: only allow specified fields + allowedCollections map[string]bool // Security: only allow specified collections + // Security settings + enableSecurityChecks bool + maxAllowedDocs int + // Query logging + enableQueryLogging bool + // Connection timeout settings + queryTimeout time.Duration +} + +// NewMongoQueryBuilder creates a new MongoDB query builder instance +func NewMongoQueryBuilder() *MongoQueryBuilder { + return &MongoQueryBuilder{ + allowedFields: make(map[string]bool), + allowedCollections: make(map[string]bool), + enableSecurityChecks: true, + maxAllowedDocs: 10000, + enableQueryLogging: true, + queryTimeout: 30 * time.Second, + } +} + +// SetSecurityOptions configures security settings +func (mqb *MongoQueryBuilder) SetSecurityOptions(enableChecks bool, maxDocs int) *MongoQueryBuilder { + mqb.enableSecurityChecks = enableChecks + mqb.maxAllowedDocs = maxDocs + return mqb +} + +// SetAllowedFields sets the list of allowed fields for security +func (mqb *MongoQueryBuilder) SetAllowedFields(fields []string) *MongoQueryBuilder { + mqb.allowedFields = make(map[string]bool) + for _, field := range fields { + mqb.allowedFields[field] = true + } + return mqb +} + +// SetAllowedCollections sets the list of allowed collections for security +func (mqb *MongoQueryBuilder) SetAllowedCollections(collections []string) *MongoQueryBuilder { + mqb.allowedCollections = make(map[string]bool) + for _, collection := range collections { + mqb.allowedCollections[collection] = true + } + return mqb +} + +// SetQueryLogging enables or disables query logging +func (mqb *MongoQueryBuilder) SetQueryLogging(enable bool) *MongoQueryBuilder { + mqb.enableQueryLogging = enable + return mqb +} + +// SetQueryTimeout sets the default query timeout +func (mqb *MongoQueryBuilder) SetQueryTimeout(timeout time.Duration) *MongoQueryBuilder { + mqb.queryTimeout = timeout + return mqb +} + +// BuildFindQuery builds a MongoDB find query from 
DynamicQuery +func (mqb *MongoQueryBuilder) BuildFindQuery(query DynamicQuery) (bson.M, *options.FindOptions, error) { + filter := bson.M{} + findOptions := options.Find() + + // Security check for limit + if mqb.enableSecurityChecks && query.Limit > mqb.maxAllowedDocs { + return nil, nil, fmt.Errorf("requested limit %d exceeds maximum allowed %d", query.Limit, mqb.maxAllowedDocs) + } + + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[query.From] { + return nil, nil, fmt.Errorf("disallowed collection: %s", query.From) + } + + // Build filter from DynamicQuery filters + if len(query.Filters) > 0 { + mongoFilter, err := mqb.buildFilter(query.Filters) + if err != nil { + return nil, nil, err + } + filter = mongoFilter + } + + // Set projection from fields + if len(query.Fields) > 0 { + projection := bson.M{} + for _, field := range query.Fields { + if field.Expression == "*" { + // Include all fields + continue + } + fieldName := field.Expression + if field.Alias != "" { + fieldName = field.Alias + } + if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] { + return nil, nil, fmt.Errorf("disallowed field: %s", fieldName) + } + projection[fieldName] = 1 + } + if len(projection) > 0 { + findOptions.SetProjection(projection) + } + } + + // Set sort + if len(query.Sort) > 0 { + sort := bson.D{} + for _, sortField := range query.Sort { + fieldName := sortField.Column + if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] { + return nil, nil, fmt.Errorf("disallowed field: %s", fieldName) + } + order := 1 // ASC + if strings.ToUpper(sortField.Order) == "DESC" { + order = -1 // DESC + } + sort = append(sort, bson.E{Key: fieldName, Value: order}) + } + findOptions.SetSort(sort) + } + + // Set limit and offset + if query.Limit > 0 { + findOptions.SetLimit(int64(query.Limit)) + } + if query.Offset > 0 { + findOptions.SetSkip(int64(query.Offset)) + } + + return filter, findOptions, nil +} + +// BuildAggregateQuery builds a MongoDB aggregation pipeline from DynamicQuery +func (mqb *MongoQueryBuilder) BuildAggregateQuery(query DynamicQuery) ([]bson.D, error) { + pipeline := []bson.D{} + + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[query.From] { + return nil, fmt.Errorf("disallowed collection: %s", query.From) + } + + // Handle CTEs as stages in the pipeline + if len(query.CTEs) > 0 { + for _, cte := range query.CTEs { + // Security check for CTE collection + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[cte.Query.From] { + return nil, fmt.Errorf("disallowed collection in CTE: %s", cte.Query.From) + } + + subPipeline, err := mqb.BuildAggregateQuery(cte.Query) + if err != nil { + return nil, fmt.Errorf("failed to build CTE '%s': %w", cte.Name, err) + } + // Add $lookup stage for joins + if len(cte.Query.Joins) > 0 { + for _, join := range cte.Query.Joins { + // Security check for joined collection + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[join.Table] { + return nil, fmt.Errorf("disallowed collection in join: %s", join.Table) + } + + lookupStage := bson.D{ + {Key: "$lookup", Value: bson.D{ + {Key: "from", Value: join.Table}, + {Key: "localField", Value: join.Alias}, + {Key: "foreignField", Value: "_id"}, + {Key: "as", Value: join.Alias}, + }}, + } + pipeline = append(pipeline, lookupStage) + } + } + // Add the sub-pipeline + pipeline = 
append(pipeline, subPipeline...) + } + } + + // Match stage for filters + if len(query.Filters) > 0 { + filter, err := mqb.buildFilter(query.Filters) + if err != nil { + return nil, err + } + pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) + } + + // Group stage for GROUP BY + if len(query.GroupBy) > 0 { + groupID := bson.D{} + for _, field := range query.GroupBy { + if mqb.allowedFields != nil && !mqb.allowedFields[field] { + return nil, fmt.Errorf("disallowed field: %s", field) + } + groupID = append(groupID, bson.E{Key: field, Value: "$" + field}) + } + + groupStage := bson.D{ + {Key: "$group", Value: bson.D{ + {Key: "_id", Value: groupID}, + }}, + } + + // Add any aggregations from fields + for _, field := range query.Fields { + if strings.Contains(field.Expression, "(") && strings.Contains(field.Expression, ")") { + // This is an aggregation function + funcName := strings.Split(field.Expression, "(")[0] + funcField := strings.TrimSuffix(strings.Split(field.Expression, "(")[1], ")") + + if mqb.allowedFields != nil && !mqb.allowedFields[funcField] { + return nil, fmt.Errorf("disallowed field: %s", funcField) + } + + switch strings.ToLower(funcName) { + case "count": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$sum", Value: 1}}, + }) + case "sum": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$sum", Value: "$" + funcField}}, + }) + case "avg": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$avg", Value: "$" + funcField}}, + }) + case "min": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$min", Value: "$" + funcField}}, + }) + case "max": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$max", Value: "$" + funcField}}, + }) + } + } + } + + pipeline = append(pipeline, groupStage) + } + + // Sort stage + if len(query.Sort) > 0 { + sort := bson.D{} + for _, sortField := range query.Sort { + fieldName := sortField.Column + if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] { + return nil, fmt.Errorf("disallowed field: %s", fieldName) + } + order := 1 // ASC + if strings.ToUpper(sortField.Order) == "DESC" { + order = -1 // DESC + } + sort = append(sort, bson.E{Key: fieldName, Value: order}) + } + pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) + } + + // Skip and limit stages + if query.Offset > 0 { + pipeline = append(pipeline, bson.D{{Key: "$skip", Value: query.Offset}}) + } + if query.Limit > 0 { + pipeline = append(pipeline, bson.D{{Key: "$limit", Value: query.Limit}}) + } + + return pipeline, nil +} + +// buildFilter builds a MongoDB filter from FilterGroups +func (mqb *MongoQueryBuilder) buildFilter(filterGroups []FilterGroup) (bson.M, error) { + if len(filterGroups) == 0 { + return bson.M{}, nil + } + + var result bson.M + var err error + + for i, group := range filterGroups { + if len(group.Filters) == 0 { + continue + } + + groupFilter, err := mqb.buildFilterGroup(group) + if err != nil { + return nil, err + } + + if i == 0 { + result = groupFilter + } else { + logicOp := "$and" + if group.LogicOp != "" { + switch strings.ToUpper(group.LogicOp) { + case "OR": + logicOp = "$or" + } + } + result = bson.M{logicOp: []bson.M{result, groupFilter}} + } + } + + return result, err +} + +// buildFilterGroup builds a filter for a single filter group +func (mqb *MongoQueryBuilder) buildFilterGroup(group FilterGroup) (bson.M, error) { + var filters []bson.M + logicOp := 
"$and" + if group.LogicOp != "" { + switch strings.ToUpper(group.LogicOp) { + case "OR": + logicOp = "$or" + } + } + + for _, filter := range group.Filters { + fieldFilter, err := mqb.buildFilterCondition(filter) + if err != nil { + return nil, err + } + filters = append(filters, fieldFilter) + } + + if len(filters) == 1 { + return filters[0], nil + } + return bson.M{logicOp: filters}, nil +} + +// buildFilterCondition builds a single filter condition for MongoDB +func (mqb *MongoQueryBuilder) buildFilterCondition(filter DynamicFilter) (bson.M, error) { + field := filter.Column + if mqb.allowedFields != nil && !mqb.allowedFields[field] { + return nil, fmt.Errorf("disallowed field: %s", field) + } + + switch filter.Operator { + case OpEqual: + return bson.M{field: filter.Value}, nil + case OpNotEqual: + return bson.M{field: bson.M{"$ne": filter.Value}}, nil + case OpIn: + values := mqb.parseArrayValue(filter.Value) + return bson.M{field: bson.M{"$in": values}}, nil + case OpNotIn: + values := mqb.parseArrayValue(filter.Value) + return bson.M{field: bson.M{"$nin": values}}, nil + case OpGreaterThan: + return bson.M{field: bson.M{"$gt": filter.Value}}, nil + case OpGreaterThanEqual: + return bson.M{field: bson.M{"$gte": filter.Value}}, nil + case OpLessThan: + return bson.M{field: bson.M{"$lt": filter.Value}}, nil + case OpLessThanEqual: + return bson.M{field: bson.M{"$lte": filter.Value}}, nil + case OpLike: + // Convert SQL LIKE to MongoDB regex + pattern := filter.Value.(string) + pattern = strings.ReplaceAll(pattern, "%", ".*") + pattern = strings.ReplaceAll(pattern, "_", ".") + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpILike: + // Case-insensitive like + pattern := filter.Value.(string) + pattern = strings.ReplaceAll(pattern, "%", ".*") + pattern = strings.ReplaceAll(pattern, "_", ".") + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpContains: + // Contains substring + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpNotContains: + // Does not contain substring + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$not": bson.M{"$regex": pattern, "$options": "i"}}}, nil + case OpStartsWith: + // Starts with + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$regex": "^" + pattern, "$options": "i"}}, nil + case OpEndsWith: + // Ends with + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$regex": pattern + "$", "$options": "i"}}, nil + case OpNull: + return bson.M{field: bson.M{"$exists": false}}, nil + case OpNotNull: + return bson.M{field: bson.M{"$exists": true}}, nil + case OpJsonContains: + // JSON contains + return bson.M{field: bson.M{"$elemMatch": filter.Value}}, nil + case OpJsonNotContains: + // JSON does not contain + return bson.M{field: bson.M{"$not": bson.M{"$elemMatch": filter.Value}}}, nil + case OpJsonExists: + // JSON path exists + return bson.M{field + "." + filter.Options["path"].(string): bson.M{"$exists": true}}, nil + case OpJsonNotExists: + // JSON path does not exist + return bson.M{field + "." 
+ filter.Options["path"].(string): bson.M{"$exists": false}}, nil + case OpArrayContains: + // Array contains + return bson.M{field: bson.M{"$elemMatch": bson.M{"$eq": filter.Value}}}, nil + case OpArrayNotContains: + // Array does not contain + return bson.M{field: bson.M{"$not": bson.M{"$elemMatch": bson.M{"$eq": filter.Value}}}}, nil + case OpArrayLength: + // Array length + if lengthOption, ok := filter.Options["length"].(int); ok { + return bson.M{field: bson.M{"$size": lengthOption}}, nil + } + return nil, fmt.Errorf("array_length operator requires 'length' option") + default: + return nil, fmt.Errorf("unsupported operator: %s", filter.Operator) + } +} + +// parseArrayValue parses an array value for MongoDB +func (mqb *MongoQueryBuilder) parseArrayValue(value interface{}) []interface{} { + if value == nil { + return nil + } + if reflect.TypeOf(value).Kind() == reflect.Slice { + v := reflect.ValueOf(value) + result := make([]interface{}, v.Len()) + for i := 0; i < v.Len(); i++ { + result[i] = v.Index(i).Interface() + } + return result + } + if str, ok := value.(string); ok { + if strings.Contains(str, ",") { + parts := strings.Split(str, ",") + result := make([]interface{}, len(parts)) + for i, part := range parts { + result[i] = strings.TrimSpace(part) + } + return result + } + return []interface{}{str} + } + return []interface{}{value} +} + +// ExecuteFind executes a MongoDB find query +func (mqb *MongoQueryBuilder) ExecuteFind(ctx context.Context, collection *mongo.Collection, query DynamicQuery, dest interface{}) error { + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] { + return fmt.Errorf("disallowed collection: %s", collection.Name()) + } + + filter, findOptions, err := mqb.BuildFindQuery(query) + if err != nil { + return err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout) + defer cancel() + } + + start := time.Now() + cursor, err := collection.Find(ctx, filter, findOptions) + if err != nil { + return err + } + defer cursor.Close(ctx) + err = cursor.All(ctx, dest) + if mqb.enableQueryLogging { + fmt.Printf("[DEBUG] MongoDB Find executed in %v\n", time.Since(start)) + } + return err +} + +// ExecuteAggregate executes a MongoDB aggregation pipeline +func (mqb *MongoQueryBuilder) ExecuteAggregate(ctx context.Context, collection *mongo.Collection, query DynamicQuery, dest interface{}) error { + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] { + return fmt.Errorf("disallowed collection: %s", collection.Name()) + } + + pipeline, err := mqb.BuildAggregateQuery(query) + if err != nil { + return err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout) + defer cancel() + } + + start := time.Now() + cursor, err := collection.Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + err = cursor.All(ctx, dest) + if mqb.enableQueryLogging { + fmt.Printf("[DEBUG] MongoDB Aggregate executed in %v\n", time.Since(start)) + } + return err +} + +// ExecuteCount executes a MongoDB count query +func (mqb *MongoQueryBuilder) ExecuteCount(ctx 
context.Context, collection *mongo.Collection, query DynamicQuery) (int64, error) { + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] { + return 0, fmt.Errorf("disallowed collection: %s", collection.Name()) + } + + filter, _, err := mqb.BuildFindQuery(query) + if err != nil { + return 0, err + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout) + defer cancel() + } + + start := time.Now() + count, err := collection.CountDocuments(ctx, filter) + if mqb.enableQueryLogging { + fmt.Printf("[DEBUG] MongoDB Count executed in %v\n", time.Since(start)) + } + return count, err +} + +// ExecuteInsert executes a MongoDB insert operation +func (mqb *MongoQueryBuilder) ExecuteInsert(ctx context.Context, collection *mongo.Collection, data InsertData) (*mongo.InsertOneResult, error) { + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] { + return nil, fmt.Errorf("disallowed collection: %s", collection.Name()) + } + + document := bson.M{} + for i, col := range data.Columns { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + document[col] = data.Values[i] + } + + // Handle JSON values + for col, val := range data.JsonValues { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + document[col] = val + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := collection.InsertOne(ctx, document) + if mqb.enableQueryLogging { + fmt.Printf("[DEBUG] MongoDB Insert executed in %v\n", time.Since(start)) + } + return result, err +} + +// ExecuteUpdate executes a MongoDB update operation +func (mqb *MongoQueryBuilder) ExecuteUpdate(ctx context.Context, collection *mongo.Collection, updateData UpdateData, filters []FilterGroup) (*mongo.UpdateResult, error) { + // Security check for collection name + if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] { + return nil, fmt.Errorf("disallowed collection: %s", collection.Name()) + } + + filter, err := mqb.buildFilter(filters) + if err != nil { + return nil, err + } + + update := bson.M{"$set": bson.M{}} + for i, col := range updateData.Columns { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + update["$set"].(bson.M)[col] = updateData.Values[i] + } + + // Handle JSON updates + for col, jsonUpdate := range updateData.JsonUpdates { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + // Use dot notation for nested JSON updates + update["$set"].(bson.M)[col+"."+jsonUpdate.Path] = jsonUpdate.Value + } + + // Set timeout if not already in context + if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout) + defer cancel() + } + + start := time.Now() + result, err := 
+    result, err := collection.UpdateMany(ctx, filter, update)
+    if mqb.enableQueryLogging {
+        fmt.Printf("[DEBUG] MongoDB Update executed in %v\n", time.Since(start))
+    }
+    return result, err
+}
+
+// ExecuteDelete executes a MongoDB delete operation
+func (mqb *MongoQueryBuilder) ExecuteDelete(ctx context.Context, collection *mongo.Collection, filters []FilterGroup) (*mongo.DeleteResult, error) {
+    // Security check for collection name
+    if mqb.enableSecurityChecks && len(mqb.allowedCollections) > 0 && !mqb.allowedCollections[collection.Name()] {
+        return nil, fmt.Errorf("disallowed collection: %s", collection.Name())
+    }
+
+    filter, err := mqb.buildFilter(filters)
+    if err != nil {
+        return nil, err
+    }
+
+    // Set timeout if not already in context
+    if _, hasDeadline := ctx.Deadline(); !hasDeadline && mqb.queryTimeout > 0 {
+        var cancel context.CancelFunc
+        ctx, cancel = context.WithTimeout(ctx, mqb.queryTimeout)
+        defer cancel()
+    }
+
+    start := time.Now()
+    result, err := collection.DeleteMany(ctx, filter)
+    if mqb.enableQueryLogging {
+        fmt.Printf("[DEBUG] MongoDB Delete executed in %v\n", time.Since(start))
+    }
+    return result, err
+}
diff --git a/internal/utils/validation/duplicate.go b/internal/utils/validation/duplicate.go
new file mode 100644
index 0000000..d28fcbf
--- /dev/null
+++ b/internal/utils/validation/duplicate.go
@@ -0,0 +1,244 @@
+package validation
+
+import (
+    "context"
+    "fmt"
+
+    queryUtils "api-service/internal/utils/query"
+
+    "github.com/jmoiron/sqlx"
+)
+
+// =============================================================================
+// DYNAMIC VALIDATION RULE
+// =============================================================================
+
+// ValidationRule defines a rule for checking duplicates or other conditions.
+// This struct makes the validator reusable for any table.
+type ValidationRule struct {
+    // TableName is the name of the table to check.
+    TableName string
+
+    // UniqueColumns is the list of columns that, taken together, must be unique.
+    // Example: []string{"email"} or []string{"first_name", "last_name", "dob"}
+    UniqueColumns []string
+
+    // Conditions are additional filters that must be satisfied.
+    // This is useful for business rules such as "status != 'deleted'".
+    // Use queryUtils.DynamicFilter for full flexibility.
+    Conditions []queryUtils.DynamicFilter
+
+    // ExcludeIDColumn and ExcludeIDValue are used for UPDATE operations,
+    // to ensure the record is not compared against itself.
+    ExcludeIDColumn string
+    ExcludeIDValue  interface{}
+}
+
+// NewUniqueFieldRule is a helper for building a uniqueness rule for a single column.
+// It is a shortcut for the most common kind of rule.
+func NewUniqueFieldRule(tableName, uniqueColumn string, additionalConditions ...queryUtils.DynamicFilter) ValidationRule {
+    return ValidationRule{
+        TableName:     tableName,
+        UniqueColumns: []string{uniqueColumn},
+        Conditions:    additionalConditions,
+    }
+}
+
+// =============================================================================
+// DYNAMIC VALIDATOR
+// =============================================================================
+
+// DynamicValidator provides methods to run validations based on a ValidationRule.
+// It is fully generic and not tied to any particular table or model.
+type DynamicValidator struct {
+    qb *queryUtils.QueryBuilder
+}
+
+// NewDynamicValidator creates a new DynamicValidator instance.
+func NewDynamicValidator(qb *queryUtils.QueryBuilder) *DynamicValidator {
+    return &DynamicValidator{qb: qb}
+}
+
+// Validate runs the validation against the given rule.
+// `data` is a map containing the values for the columns to check (usually taken from the request body).
+// It returns `true` if a duplicate is found (validation fails) and `false` if no duplicate exists (validation passes).
+func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule ValidationRule, data map[string]interface{}) (bool, error) {
+    // LOGGING: Log validation start
+    fmt.Printf("[VALIDATION] Starting validation for table: %s, unique columns: %v, data: %v\n", rule.TableName, rule.UniqueColumns, data)
+
+    if len(rule.UniqueColumns) == 0 {
+        fmt.Printf("[VALIDATION] ERROR: ValidationRule must have at least one UniqueColumn\n")
+        return false, fmt.Errorf("ValidationRule must have at least one UniqueColumn")
+    }
+
+    // 1. Collect all filters from the rule
+    var allFilters []queryUtils.DynamicFilter
+
+    // Add the extra conditions (e.g. status != 'deleted')
+    allFilters = append(allFilters, rule.Conditions...)
+    fmt.Printf("[VALIDATION] Added %d condition filters\n", len(rule.Conditions))
+
+    // 2. Build filters for the unique columns based on the provided data
+    for _, colName := range rule.UniqueColumns {
+        value, exists := data[colName]
+        if !exists {
+            // Missing data for a unique column is a programming error.
+            fmt.Printf("[VALIDATION] ERROR: data for unique column '%s' not found in provided data map\n", colName)
+            return false, fmt.Errorf("data for unique column '%s' not found in provided data map", colName)
+        }
+        allFilters = append(allFilters, queryUtils.DynamicFilter{
+            Column:   colName,
+            Operator: queryUtils.OpEqual,
+            Value:    value,
+        })
+        fmt.Printf("[VALIDATION] Added filter for column '%s' with value: %v\n", colName, value)
+    }
+
+    // 3. Add the ID exclusion filter (for UPDATE operations)
+    if rule.ExcludeIDColumn != "" {
+        allFilters = append(allFilters, queryUtils.DynamicFilter{
+            Column:   rule.ExcludeIDColumn,
+            Operator: queryUtils.OpNotEqual,
+            Value:    rule.ExcludeIDValue,
+        })
+        fmt.Printf("[VALIDATION] Added exclude filter for column '%s' with value: %v\n", rule.ExcludeIDColumn, rule.ExcludeIDValue)
+    }
+
+    // 4. Build and execute the query that counts matching records
+    query := queryUtils.DynamicQuery{
+        From:    rule.TableName,
+        Filters: []queryUtils.FilterGroup{{Filters: allFilters, LogicOp: "AND"}},
+    }
+
+    fmt.Printf("[VALIDATION] Built query with %d total filters\n", len(allFilters))
+
+    count, err := dv.qb.ExecuteCount(ctx, db, query)
+    if err != nil {
+        fmt.Printf("[VALIDATION] ERROR: failed to execute validation query for table %s: %v\n", rule.TableName, err)
+        return false, fmt.Errorf("failed to execute validation query for table %s: %w", rule.TableName, err)
+    }
+
+    fmt.Printf("[VALIDATION] Query executed successfully, count result: %d\n", count)
+
+    // 5. Return the result
+    result := count > 0
+    fmt.Printf("[VALIDATION] Validation result: isDuplicate=%t (count > 0: %d > 0 = %t)\n", result, count, result)
+    return result, nil
+}
+
+// =============================================================================
+// USAGE EXAMPLES (TO BE PLACED IN YOUR HANDLERS)
+// =============================================================================
+
+/*
+// --- Usage in RetribusiHandler ---
+
+// 1. Add the DynamicValidator to the handler struct
+type RetribusiHandler struct {
+    // ...
+    validator *validation.DynamicValidator
+}
+
+// 2. Initialize it in the constructor
+func NewRetribusiHandler() *RetribusiHandler {
+    qb := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL).SetAllowedColumns(...)
+
+    return &RetribusiHandler{
+        // ...
+        validator: validation.NewDynamicValidator(qb),
+    }
+}
+
+// 3. Use it in CreateRetribusi
+func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) {
+    var req retribusi.RetribusiCreateRequest
+    // ... bind and validate the request ...
+
+    // Prepare the validation rule: KodeTarif must be unique among non-deleted records.
+    rule := validation.NewUniqueFieldRule(
+        "data_retribusi",         // Table name
+        "Kode_tarif",             // Column that must be unique
+        queryUtils.DynamicFilter{ // Additional condition
+            Column:   "status",
+            Operator: queryUtils.OpNotEqual,
+            Value:    "deleted",
+        },
+    )
+
+    // Prepare the request data to validate
+    dataToValidate := map[string]interface{}{
+        "Kode_tarif": req.KodeTarif,
+    }
+
+    // Run the validation
+    isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
+    if err != nil {
+        h.logAndRespondError(c, "Failed to validate Kode Tarif", err, http.StatusInternalServerError)
+        return
+    }
+
+    if isDuplicate {
+        h.respondError(c, "Kode Tarif already exists", fmt.Errorf("duplicate Kode Tarif: %s", req.KodeTarif), http.StatusConflict)
+        return
+    }
+
+    // ... continue with the create flow ...
+}
+
+// 4. Use it in UpdateRetribusi
+func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) {
+    id := c.Param("id")
+    var req retribusi.RetribusiUpdateRequest
+    // ... bind and validate the request ...
+
+    // Prepare the validation rule: KodeTarif must be unique, except for the record with this ID.
+    rule := validation.ValidationRule{
+        TableName:     "data_retribusi",
+        UniqueColumns: []string{"Kode_tarif"},
+        Conditions: []queryUtils.DynamicFilter{
+            {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
+        },
+        ExcludeIDColumn: "id", // Exclude by the 'id' column
+        ExcludeIDValue:  id,   // ...using the ID value from the path parameter
+    }
+
+    dataToValidate := map[string]interface{}{
+        "Kode_tarif": req.KodeTarif,
+    }
+
+    isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
+    if err != nil {
+        h.logAndRespondError(c, "Failed to validate Kode Tarif", err, http.StatusInternalServerError)
+        return
+    }
+
+    if isDuplicate {
+        h.respondError(c, "Kode Tarif already exists", fmt.Errorf("duplicate Kode Tarif: %s", req.KodeTarif), http.StatusConflict)
+        return
+    }
+
+    // ... continue with the update flow ...
+}
+
+// --- Usage for Other Cases ---
+
+// Example: composite uniqueness validation for the 'users' table
+// (email and company_id must be unique together)
+func (h *UserHandler) CreateUser(c *gin.Context) {
+    // ...
+
+    rule := validation.ValidationRule{
+        TableName:     "users",
+        UniqueColumns: []string{"email", "company_id"}, // Composite uniqueness
+    }
+
+    dataToValidate := map[string]interface{}{
+        "email":      req.Email,
+        "company_id": req.CompanyID,
+    }
+
+    isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
+    // ... handle error and duplicate
+}
+
+*/
diff --git a/pkg/validator/validator b/pkg/validator/validator
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pkg/validator/validator
@@ -0,0 +1 @@
+
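Editor's note: the commit above defines the MongoQueryBuilder Execute* helpers but does not include a caller for them. The following is a minimal usage sketch only, not part of the commit; the queryUtils.NewMongoQueryBuilder constructor, the connection URI, and the "clinic"/"patients" names are assumptions for illustration, while the DynamicQuery, FilterGroup, and DynamicFilter field usage follows what appears in the diff.

package main

import (
    "context"
    "fmt"
    "log"
    "time"

    queryUtils "api-service/internal/utils/query"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    // Connect to a local MongoDB instance (URI is a placeholder).
    client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://localhost:27017"))
    if err != nil {
        log.Fatal(err)
    }
    defer client.Disconnect(ctx)

    collection := client.Database("clinic").Collection("patients")

    // Assumed constructor; the real initialization code is not shown in this commit.
    mqb := queryUtils.NewMongoQueryBuilder()

    // Find non-deleted patients named "Budi" using the dynamic filter types from the diff.
    query := queryUtils.DynamicQuery{
        From: "patients",
        Filters: []queryUtils.FilterGroup{{
            LogicOp: "AND",
            Filters: []queryUtils.DynamicFilter{
                {Column: "name", Operator: queryUtils.OpEqual, Value: "Budi"},
                {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
            },
        }},
    }

    // ExecuteFind decodes the matching documents into the destination slice.
    var patients []bson.M
    if err := mqb.ExecuteFind(ctx, collection, query, &patients); err != nil {
        log.Fatal(err)
    }

    // ExecuteCount reuses the same filter to count all matching documents.
    total, err := mqb.ExecuteCount(ctx, collection, query)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("fetched %d of %d matching patients\n", len(patients), total)
}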