diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..2e44d78 --- /dev/null +++ b/config.yaml @@ -0,0 +1,14 @@ +auth: + type: static # Options: jwt, keycloak, static, hybrid (for hybrid mode keycloak is primary and jwt is fallback) + static_tokens: + - token1 + - token2 + - token3 + - token4 + fallback_to: jwt # Options: keycloak, static, jwt (for hybrid mode keycloak is primary and jwt is fallback) +keycloak: + enabled: true + issuer: https://auth.rssa.top/realms/sandbox + audience: nuxtsim-pendaftaran + jwks_url: https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs + diff --git a/go.mod b/go.mod index 5dd4cb6..cb2442d 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,6 @@ require ( github.com/gin-gonic/gin v1.10.1 github.com/golang-jwt/jwt/v5 v5.3.0 github.com/google/uuid v1.6.0 - github.com/gorilla/websocket v1.5.1 github.com/jackc/pgx/v5 v5.7.2 // Ensure pgx is a direct dependency go.mongodb.org/mongo-driver v1.17.3 golang.org/x/crypto v0.41.0 @@ -17,18 +16,22 @@ require ( ) require ( - github.com/daku10/go-lz-string v0.0.6 + github.com/Masterminds/squirrel v1.5.4 + github.com/gin-contrib/cors v1.7.6 github.com/go-playground/validator/v10 v10.27.0 + github.com/go-redis/redis_rate/v10 v10.0.1 github.com/go-sql-driver/mysql v1.8.1 + github.com/jmoiron/sqlx v1.4.0 github.com/joho/godotenv v1.5.1 github.com/lib/pq v1.10.9 - github.com/mashingan/smapping v0.1.19 - github.com/rs/zerolog v1.34.0 + github.com/redis/go-redis/v9 v9.16.0 + github.com/spf13/viper v1.21.0 github.com/swaggo/files v1.0.1 github.com/swaggo/gin-swagger v1.6.0 github.com/swaggo/swag v1.16.6 - github.com/tidwall/gjson v1.18.0 + golang.org/x/time v0.14.0 gopkg.in/yaml.v2 v2.4.0 + gopkg.in/yaml.v3 v3.0.1 ) require ( @@ -38,7 +41,10 @@ require ( github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/bytedance/sonic v1.14.0 // indirect github.com/bytedance/sonic/loader v0.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect 
github.com/cloudwego/base64x v0.1.6 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect github.com/gabriel-vasile/mimetype v1.4.9 // indirect github.com/gin-contrib/sse v1.1.0 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect @@ -47,6 +53,7 @@ require ( github.com/go-openapi/swag v0.19.15 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect @@ -60,9 +67,10 @@ require ( github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect + github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/mailru/easyjson v0.7.6 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/microsoft/go-mssqldb v1.8.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect @@ -70,14 +78,19 @@ require ( github.com/montanaflynn/stats v0.7.1 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/rogpeppe/go-internal v1.14.1 // indirect - github.com/tidwall/match v1.1.1 // indirect - github.com/tidwall/pretty v1.2.0 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/subosito/gotenv v1.6.0 // indirect 
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.3.0 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect golang.org/x/arch v0.20.0 // indirect golang.org/x/mod v0.26.0 // indirect golang.org/x/net v0.43.0 // indirect @@ -85,6 +98,5 @@ require ( golang.org/x/text v0.28.0 // indirect golang.org/x/tools v0.35.0 // indirect google.golang.org/protobuf v1.36.7 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect gorm.io/gorm v1.30.0 // indirect ) diff --git a/go.sum b/go.sum index 7ecaabb..4fc4172 100644 --- a/go.sum +++ b/go.sum @@ -20,27 +20,40 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mx github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM= +github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10= github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 
h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= -github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/daku10/go-lz-string v0.0.6 h1:aO8FFp4QPuNp7+WNyh1DyNjGF3UbZu95tUv9xOZNsYQ= -github.com/daku10/go-lz-string v0.0.6/go.mod h1:Vk++rSG3db8HXJaHEAbxiy/ukjTmPBw/iI+SrVZDzfs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest 
v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/cors v1.7.6 h1:3gQ8GMzs1Ylpf70y8bMw4fVpycXIeX1ZemuSQIsnQQY= +github.com/gin-contrib/cors v1.7.6/go.mod h1:Ulcl+xN4jel9t1Ry8vqph23a60FwH9xVLd+3ykmTjOk= github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= @@ -65,11 +78,14 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-redis/redis_rate/v10 v10.0.1 h1:calPxi7tVlxojKunJwQ72kwfozdy25RjA0bCj1h0MUo= +github.com/go-redis/redis_rate/v10 v10.0.1/go.mod h1:EMiuO9+cjRkR7UvdvwMO7vbgqJkltQHtwbdIQvaBKIU= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= 
-github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= @@ -90,8 +106,6 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= -github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= @@ -112,6 +126,8 @@ github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -132,6 +148,10 @@ github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= @@ -140,14 +160,10 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mashingan/smapping v0.1.19 h1:SsEtuPn2UcM1croIupPtGLgWgpYRuS0rSQMvKD9g2BQ= -github.com/mashingan/smapping v0.1.19/go.mod h1:FjfiwFxGOuNxL/OT1WcrNAwTPx0YJeg5JiXwBB1nyig= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 
h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/microsoft/go-mssqldb v1.8.2 h1:236sewazvC8FvG6Dr3bszrVhMkAl4KYImryLkRMCd0I= github.com/microsoft/go-mssqldb v1.8.2/go.mod h1:vp38dT33FGfVotRiTmDo3bFyaHq+p3LektQrjTULowo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -166,20 +182,31 @@ github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzL github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/redis/go-redis/v9 v9.16.0 h1:OotgqgLSRCmzfqChbQyG1PHC3tLNR89DG4jdOERSEP4= +github.com/redis/go-redis/v9 v9.16.0/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= -github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= -github.com/rs/zerolog v1.34.0 
h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= -github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= +github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= +github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= +github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -189,20 +216,16 @@ github.com/stretchr/testify v1.8.0/go.mod 
h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M= github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo= github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI= github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg= -github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= -github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= -github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= -github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm 
v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= @@ -218,6 +241,8 @@ github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfS github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -278,7 +303,6 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -326,6 +350,8 @@ golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= golang.org/x/text v0.28.0/go.mod 
h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= diff --git a/internal/config/config.go b/internal/config/config.go index f34deb4..c9bf8a1 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -13,16 +13,20 @@ import ( "time" "github.com/go-playground/validator/v10" + "github.com/spf13/viper" + "gopkg.in/yaml.v3" ) type Config struct { Server ServerConfig Databases map[string]DatabaseConfig ReadReplicas map[string][]DatabaseConfig // For read replicas + Auth AuthConfig Keycloak KeycloakConfig Bpjs BpjsConfig SatuSehat SatuSehatConfig Swagger SwaggerConfig + Security SecurityConfig Validator *validator.Validate } @@ -63,6 +67,25 @@ type DatabaseConfig struct { ConnMaxLifetime time.Duration // Connection max lifetime } +type AuthConfig struct { + Type string `yaml:"type" env:"AUTH_TYPE"` // "keycloak", "jwt", "static", "hybrid" + StaticTokens []string `yaml:"static_tokens" env:"AUTH_STATIC_TOKENS"` // Support multiple static tokens + FallbackTo string `yaml:"fallback_to" env:"AUTH_FALLBACK_TO"` // fallback auth type if primary fails +} + +// AuthYAMLConfig represents the auth section in config.yaml +type AuthYAMLConfig struct { + Type string `yaml:"type"` + StaticTokens []string `yaml:"static_tokens"` + FallbackTo string `yaml:"fallback_to"` +} +type KeycloakYAMLConfig struct { + Issuer string `yaml:"issuer"` + Audience string `yaml:"audience"` + JwksURL string `yaml:"jwks_url"` + Enabled bool `yaml:"enabled"` +} + type KeycloakConfig struct { Issuer string Audience string @@ -90,27 +113,30 @@ type 
SatuSehatConfig struct { Timeout time.Duration `json:"timeout"` } -// SetHeader generates required headers for BPJS VClaim API -// func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { -// timenow := time.Now().UTC() -// t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z") -// if err != nil { -// log.Fatal(err) -// } +// SecurityConfig berisi semua pengaturan untuk middleware keamanan +type SecurityConfig struct { + // CORS + TrustedOrigins []string `mapstructure:"trusted_origins"` + // Rate Limiting + RateLimit RateLimitConfig `mapstructure:"rate_limit"` + // Input Validation + MaxInputLength int `mapstructure:"max_input_length"` +} -// tstamp := timenow.Unix() - t.Unix() -// secret := []byte(cfg.SecretKey) -// message := []byte(cfg.ConsID + "&" + fmt.Sprint(tstamp)) -// hash := hmac.New(sha256.New, secret) -// hash.Write(message) +// RateLimitConfig berisi pengaturan untuk rate limiter +type RateLimitConfig struct { + RequestsPerMinute int `mapstructure:"requests_per_minute"` + Redis RedisConfig `mapstructure:"redis"` +} -// // to lowercase hexits -// hex.EncodeToString(hash.Sum(nil)) -// // to base64 -// xSignature := base64.StdEncoding.EncodeToString(hash.Sum(nil)) +// RedisConfig berisi detail koneksi ke Redis +type RedisConfig struct { + Host string `mapstructure:"host"` + Port int `mapstructure:"port"` + Password string `mapstructure:"password"` + DB int `mapstructure:"db"` +} -// return cfg.ConsID, cfg.SecretKey, cfg.UserKey, fmt.Sprint(tstamp), xSignature -// } func (cfg BpjsConfig) SetHeader() (string, string, string, string, string) { timenow := time.Now().UTC() t, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z") @@ -149,6 +175,7 @@ func (cfg ConfigBpjs) SetHeader() (string, string, string, string, string) { } func LoadConfig() *Config { + log.Printf("DEBUG: Raw ENV for SECURITY_MAX_INPUT_LENGTH is: '%s'", os.Getenv("SECURITY_MAX_INPUT_LENGTH")) config := &Config{ Server: ServerConfig{ Port: getEnvAsInt("PORT", 8080), 
@@ -156,12 +183,8 @@ func LoadConfig() *Config { }, Databases: make(map[string]DatabaseConfig), ReadReplicas: make(map[string][]DatabaseConfig), - Keycloak: KeycloakConfig{ - Issuer: getEnv("KEYCLOAK_ISSUER", "https://keycloak.example.com/auth/realms/yourrealm"), - Audience: getEnv("KEYCLOAK_AUDIENCE", "your-client-id"), - JwksURL: getEnv("KEYCLOAK_JWKS_URL", "https://keycloak.example.com/auth/realms/yourrealm/protocol/openid-connect/certs"), - Enabled: getEnvAsBool("KEYCLOAK_ENABLED", true), - }, + Auth: loadAuthConfig(), + Keycloak: loadKeycloakConfig(), Bpjs: BpjsConfig{ BaseURL: getEnv("BPJS_BASEURL", "https://apijkn.bpjs-kesehatan.go.id"), ConsID: getEnv("BPJS_CONSID", ""), @@ -194,8 +217,21 @@ func LoadConfig() *Config { BasePath: getEnv("SWAGGER_BASE_PATH", "/api/v1"), Schemes: parseSchemes(getEnv("SWAGGER_SCHEMES", "http,https")), }, + Security: SecurityConfig{ + TrustedOrigins: parseOrigins(getEnv("SECURITY_TRUSTED_ORIGINS", "http://localhost:3000,http://localhost:8080")), + MaxInputLength: getEnvAsInt("SECURITY_MAX_INPUT_LENGTH", 500), + RateLimit: RateLimitConfig{ + RequestsPerMinute: getEnvAsInt("RATE_LIMIT_REQUESTS_PER_MINUTE", 60), + Redis: RedisConfig{ + Host: getEnv("REDIS_HOST", "localhost"), + Port: getEnvAsInt("REDIS_PORT", 6379), + Password: getEnv("REDIS_PASSWORD", ""), + DB: getEnvAsInt("REDIS_DB", 0), + }, + }, + }, } - + log.Printf("DEBUG: Final Config Object. 
MaxInputLength is: %d", config.Security.MaxInputLength) // Initialize validator config.Validator = validator.New() @@ -205,28 +241,155 @@ func LoadConfig() *Config { // Load read replica configurations config.loadReadReplicaConfigs() + log.Printf("DEBUG [LoadConfig]: Config object created at address: %p", config) + log.Printf("DEBUG [LoadConfig]: Security.MaxInputLength is: %d", config.Security.MaxInputLength) return config } +func loadAuthConfig() AuthConfig { + // --- AWAL TAMBAHAN DEBUG --- + // Cetak direktori kerja saat ini untuk debugging + wd, err := os.Getwd() + if err != nil { + log.Printf("Error getting working directory: %v", err) + } else { + log.Printf("DEBUG: Current working directory is: %s", wd) + } + // --- AKHIR TAMBAHAN DEBUG --- + + authConfig := AuthConfig{ + Type: "jwt", // default to jwt for backward compatibility + FallbackTo: "", + StaticTokens: []string{}, + } + + // Path file yang akan dibaca + configPath := "internal/config/config.yaml" + log.Printf("DEBUG: Attempting to read auth config from: %s", configPath) + + // Load auth configuration from config.yaml first + if data, err := os.ReadFile(configPath); err == nil { + log.Printf("DEBUG: Successfully read config.yaml file. Parsing...") // Tambahkan log sukses + + var yamlConfig struct { + Auth AuthYAMLConfig `yaml:"auth"` + } + if err := yaml.Unmarshal(data, &yamlConfig); err == nil { + // Log nilai yang berhasil dibaca + log.Printf("DEBUG: Parsed YAML. 
Type: '%s', Tokens: %d", yamlConfig.Auth.Type, len(yamlConfig.Auth.StaticTokens)) + + authConfig.Type = yamlConfig.Auth.Type + authConfig.FallbackTo = yamlConfig.Auth.FallbackTo + authConfig.StaticTokens = yamlConfig.Auth.StaticTokens + } else { + log.Printf("ERROR: Failed to unmarshal YAML: %v", err) + } + } else { + // --- AWAL TAMBAHAN DEBUG --- + // Cetak error spesifik jika file tidak ditemukan + log.Printf("ERROR: Could not read config file at '%s': %v", configPath, err) + // --- AKHIR TAMBAHAN DEBUG --- + } + + // Then override with environment variables if set + if envType := getEnv("AUTH_TYPE", ""); envType != "" { + log.Printf("DEBUG: Overriding auth type with environment variable: %s", envType) + authConfig.Type = envType + } + if envFallback := getEnv("AUTH_FALLBACK_TO", ""); envFallback != "" { + authConfig.FallbackTo = envFallback + } + envTokens := parseStaticTokens(getEnv("AUTH_STATIC_TOKENS", "")) + if len(envTokens) > 0 { + authConfig.StaticTokens = envTokens + } + + // Log hasil akhir sebelum dikembalikan + log.Printf("DEBUG: Final AuthConfig before returning: Type='%s', TokenCount=%d", authConfig.Type, len(authConfig.StaticTokens)) + + return authConfig +} + +// Lakukan hal yang sama untuk loadKeycloakConfig +func loadKeycloakConfig() KeycloakConfig { + // --- AWAL TAMBAHAN DEBUG --- + // Cetak direktori kerja saat ini untuk debugging + wd, err := os.Getwd() + if err != nil { + log.Printf("Error getting working directory for keycloak config: %v", err) + } else { + log.Printf("DEBUG (Keycloak): Current working directory is: %s", wd) + } + // --- AKHIR TAMBAHAN DEBUG --- + + v := viper.New() + v.SetConfigName("config") + v.SetConfigType("yaml") + v.AddConfigPath(".") + v.AddConfigPath("./config") + v.AddConfigPath("./internal/config") + + // --- AWAL TAMBAHAN DEBUG --- + log.Printf("DEBUG (Keycloak): Viper is set to search for config in: '.', './config', './internal/config'") + // --- AKHIR TAMBAHAN DEBUG --- + + if err := v.ReadInConfig(); err == 
nil { + // Log jika file berhasil ditemukan dan dibaca + log.Printf("DEBUG (Keycloak): Successfully read config file: %s", v.ConfigFileUsed()) + + keycloakConfig := KeycloakConfig{ + Issuer: v.GetString("keycloak.issuer"), + Audience: v.GetString("keycloak.audience"), + JwksURL: v.GetString("keycloak.jwks_url"), + Enabled: v.GetBool("keycloak.enabled"), + } + + // Log nilai yang berhasil dibaca dari file + log.Printf("DEBUG (Keycloak): Parsed values from file. Issuer: '%s', Enabled: %t", keycloakConfig.Issuer, keycloakConfig.Enabled) + + log.Printf("Loaded keycloak config from file: enabled=%t", keycloakConfig.Enabled) + return keycloakConfig + } else { + // --- AWAL TAMBAHAN DEBUG --- + // Cetak error spesifik jika file tidak ditemukan + log.Printf("ERROR (Keycloak): Could not read config file: %v", err) + // --- AKHIR TAMBAHAN DEBUG --- + } + + // Fallback ke environment variable + log.Printf("DEBUG (Keycloak): Falling back to environment variables.") + fallbackConfig := KeycloakConfig{ + Issuer: getEnv("KEYCLOAK_ISSUER", ""), + Audience: getEnv("KEYCLOAK_AUDIENCE", ""), + JwksURL: getEnv("KEYCLOAK_JWKS_URL", ""), + Enabled: getEnvAsBool("KEYCLOAK_ENABLED", false), + } + + // Log hasil akhir dari fallback + log.Printf("DEBUG (Keycloak): Final fallback config. 
Issuer: '%s', Enabled: %t", fallbackConfig.Issuer, fallbackConfig.Enabled) + + return fallbackConfig +} + func (c *Config) loadDatabaseConfigs() { // Simplified approach: Directly load from environment variables // This ensures we get the exact values specified in .env - // Primary database configuration - c.Databases["default"] = DatabaseConfig{ - Name: "default", - Type: getEnv("DB_CONNECTION", "postgres"), - Host: getEnv("DB_HOST", "localhost"), - Port: getEnvAsInt("DB_PORT", 5432), - Username: getEnv("DB_USERNAME", ""), - Password: getEnv("DB_PASSWORD", ""), - Database: getEnv("DB_DATABASE", "satu_db"), - Schema: getEnv("DB_SCHEMA", "public"), - SSLMode: getEnv("DB_SSLMODE", "disable"), - MaxOpenConns: getEnvAsInt("DB_MAX_OPEN_CONNS", 25), - MaxIdleConns: getEnvAsInt("DB_MAX_IDLE_CONNS", 25), - ConnMaxLifetime: parseDuration(getEnv("DB_CONN_MAX_LIFETIME", "5m")), - } + // // Primary database configuration + // c.Databases["default"] = DatabaseConfig{ + // Name: "default", + // Type: getEnv("DB_CONNECTION", "postgres"), + // Host: getEnv("DB_HOST", "localhost"), + // Port: getEnvAsInt("DB_PORT", 5432), + // Username: getEnv("DB_USERNAME", ""), + // Password: getEnv("DB_PASSWORD", ""), + // Database: getEnv("DB_DATABASE", "satu_db"), + // Schema: getEnv("DB_SCHEMA", "public"), + // SSLMode: getEnv("DB_SSLMODE", "disable"), + // MaxOpenConns: getEnvAsInt("DB_MAX_OPEN_CONNS", 25), + // MaxIdleConns: getEnvAsInt("DB_MAX_IDLE_CONNS", 25), + // ConnMaxLifetime: parseDuration(getEnv("DB_CONN_MAX_LIFETIME", "5m")), + // } // SATUDATA database configuration c.addPostgreSQLConfigs() @@ -669,71 +832,141 @@ func parseSchemes(schemesStr string) []string { return schemes } +// parseStaticTokens parses comma-separated static tokens string into a slice +func parseStaticTokens(tokensStr string) []string { + if tokensStr == "" { + return []string{} + } + + tokens := strings.Split(tokensStr, ",") + for i, token := range tokens { + tokens[i] = strings.TrimSpace(token) + // Remove 
empty tokens + if tokens[i] == "" { + tokens = append(tokens[:i], tokens[i+1:]...) + i-- + } + } + return tokens +} + +func parseOrigins(originsStr string) []string { + if originsStr == "" { + return []string{"http://localhost:8080"} // Default untuk pengembangan + } + origins := strings.Split(originsStr, ",") + for i, origin := range origins { + origins[i] = strings.TrimSpace(origin) + } + return origins +} + func (c *Config) Validate() error { + var errs []string + if len(c.Databases) == 0 { - log.Fatal("At least one database configuration is required") + errs = append(errs, "at least one database configuration is required") } for name, db := range c.Databases { if db.Host == "" { - log.Fatalf("Database host is required for %s", name) + errs = append(errs, fmt.Sprintf("database host is required for %s", name)) } if db.Username == "" { - log.Fatalf("Database username is required for %s", name) + errs = append(errs, fmt.Sprintf("database username is required for %s", name)) } if db.Password == "" { - log.Fatalf("Database password is required for %s", name) + errs = append(errs, fmt.Sprintf("database password is required for %s", name)) } if db.Database == "" { - log.Fatalf("Database name is required for %s", name) + errs = append(errs, fmt.Sprintf("database name is required for %s", name)) } } if c.Bpjs.BaseURL == "" { - log.Fatal("BPJS Base URL is required") + errs = append(errs, "BPJS Base URL is required") } if c.Bpjs.ConsID == "" { - log.Fatal("BPJS Consumer ID is required") + errs = append(errs, "BPJS Consumer ID is required") } if c.Bpjs.UserKey == "" { - log.Fatal("BPJS User Key is required") + errs = append(errs, "BPJS User Key is required") } if c.Bpjs.SecretKey == "" { - log.Fatal("BPJS Secret Key is required") + errs = append(errs, "BPJS Secret Key is required") } - // Validate Keycloak configuration if enabled - if c.Keycloak.Enabled { + // Validate authentication configuration + switch c.Auth.Type { + case "keycloak": + if !c.Keycloak.Enabled { + errs 
= append(errs, "keycloak.enabled must be true when auth.type is 'keycloak'") + } if c.Keycloak.Issuer == "" { - log.Fatal("Keycloak issuer is required when Keycloak is enabled") + errs = append(errs, "keycloak.issuer is required when auth.type is 'keycloak'") } if c.Keycloak.Audience == "" { - log.Fatal("Keycloak audience is required when Keycloak is enabled") + errs = append(errs, "keycloak.audience is required when auth.type is 'keycloak'") } if c.Keycloak.JwksURL == "" { - log.Fatal("Keycloak JWKS URL is required when Keycloak is enabled") + errs = append(errs, "keycloak.jwks_url is required when auth.type is 'keycloak'") + } + case "static": + if len(c.Auth.StaticTokens) == 0 { + errs = append(errs, "auth.static_tokens is required when auth.type is 'static'") + } + case "hybrid": + if c.Auth.FallbackTo == "" { + errs = append(errs, "auth.fallback_to is required when auth.type is 'hybrid'") + } + // Validate fallback configuration + switch c.Auth.FallbackTo { + case "keycloak": + if !c.Keycloak.Enabled { + errs = append(errs, "keycloak.enabled must be true when auth.fallback_to is 'keycloak'") + } + case "static": + if len(c.Auth.StaticTokens) == 0 { + errs = append(errs, "auth.static_tokens is required when auth.fallback_to is 'static'") + } + } + } + + // Legacy validation for backward compatibility + if c.Auth.Type != "keycloak" && c.Keycloak.Enabled { + if c.Keycloak.Issuer == "" { + errs = append(errs, "Keycloak issuer is required when Keycloak is enabled") + } + if c.Keycloak.Audience == "" { + errs = append(errs, "Keycloak audience is required when Keycloak is enabled") + } + if c.Keycloak.JwksURL == "" { + errs = append(errs, "Keycloak JWKS URL is required when Keycloak is enabled") } } // Validate SatuSehat configuration if c.SatuSehat.OrgID == "" { - log.Fatal("SatuSehat Organization ID is required") + errs = append(errs, "SatuSehat Organization ID is required") } if c.SatuSehat.FasyakesID == "" { - log.Fatal("SatuSehat Fasyankes ID is required") + 
errs = append(errs, "SatuSehat Fasyankes ID is required") } if c.SatuSehat.ClientID == "" { - log.Fatal("SatuSehat Client ID is required") + errs = append(errs, "SatuSehat Client ID is required") } if c.SatuSehat.ClientSecret == "" { - log.Fatal("SatuSehat Client Secret is required") + errs = append(errs, "SatuSehat Client Secret is required") } if c.SatuSehat.AuthURL == "" { - log.Fatal("SatuSehat Auth URL is required") + errs = append(errs, "SatuSehat Auth URL is required") } if c.SatuSehat.BaseURL == "" { - log.Fatal("SatuSehat Base URL is required") + errs = append(errs, "SatuSehat Base URL is required") } + if len(errs) > 0 { + return fmt.Errorf("configuration validation failed: %s", strings.Join(errs, "; ")) + } return nil } diff --git a/internal/config/config.yaml b/internal/config/config.yaml new file mode 100644 index 0000000..2e44d78 --- /dev/null +++ b/internal/config/config.yaml @@ -0,0 +1,14 @@ +auth: + type: static # Options: jwt, keycloak, static, hybrid (for hybrid mode keycloak is primary and jwt is fallback) + static_tokens: + - token1 + - token2 + - token3 + - token4 + fallback_to: jwt # Options: keycloak, static, jwt (for hybrid mode keycloak is primary and jwt is fallback) +keycloak: + enabled: true + issuer: https://auth.rssa.top/realms/sandbox + audience: nuxtsim-pendaftaran + jwks_url: https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs + diff --git a/internal/handlers/auth/auth.go b/internal/handlers/auth/auth.go index 3bd74dd..c5b3fed 100644 --- a/internal/handlers/auth/auth.go +++ b/internal/handlers/auth/auth.go @@ -1,6 +1,7 @@ package handlers import ( + "api-service/internal/models/auth" models "api-service/internal/models/auth" services "api-service/internal/services/auth" "net/http" @@ -62,9 +63,22 @@ func (h *AuthHandler) Login(c *gin.Context) { // @Failure 401 {object} map[string]string "Unauthorized" // @Router /api/v1/auth/refresh [post] func (h *AuthHandler) RefreshToken(c *gin.Context) { - // For now, this is a 
placeholder for refresh token functionality - // In a real implementation, you would handle refresh tokens here - c.JSON(http.StatusNotImplemented, gin.H{"error": "refresh token not implemented"}) + var refreshReq auth.RefreshTokenRequest + + // Bind JSON request + if err := c.ShouldBindJSON(&refreshReq); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Refresh token + tokenResponse, err := h.authService.RefreshToken(refreshReq.RefreshToken) + if err != nil { + c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) } // Register godoc @@ -78,12 +92,7 @@ func (h *AuthHandler) RefreshToken(c *gin.Context) { // @Failure 400 {object} map[string]string "Bad request" // @Router /api/v1/auth/register [post] func (h *AuthHandler) Register(c *gin.Context) { - var registerReq struct { - Username string `json:"username" binding:"required"` - Email string `json:"email" binding:"required,email"` - Password string `json:"password" binding:"required,min=6"` - Role string `json:"role" binding:"required"` - } + var registerReq auth.RegisterRequest if err := c.ShouldBindJSON(®isterReq); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -123,10 +132,98 @@ func (h *AuthHandler) Me(c *gin.Context) { } // In a real implementation, you would fetch user details from database - c.JSON(http.StatusOK, gin.H{ - "id": userID, - "username": c.GetString("username"), - "email": c.GetString("email"), - "role": c.GetString("role"), + c.JSON(http.StatusOK, auth.UserResponse{ + ID: userID.(string), + Username: c.GetString("username"), + Email: c.GetString("email"), + Role: c.GetString("role"), }) } + +// TokenHandler handles token generation endpoints +type TokenHandler struct { + authService *services.AuthService +} + +// NewTokenHandler creates a new token handler +func NewTokenHandler(authService *services.AuthService) *TokenHandler { + return &TokenHandler{ + 
authService: authService, + } +} + +// GenerateToken godoc +// @Summary Generate JWT token +// @Description Generate a JWT token for testing purposes +// @Tags Token +// @Accept json +// @Produce json +// @Param token body map[string]interface{} true "Token generation data" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/token/generate [post] +func (h *TokenHandler) GenerateToken(c *gin.Context) { + var req map[string]interface{} + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Extract user data from request + userID, ok := req["user_id"].(string) + if !ok || userID == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "user_id is required"}) + return + } + + username, _ := req["username"].(string) + email, _ := req["email"].(string) + role, _ := req["role"].(string) + + // Generate token + tokenResponse, err := h.authService.GenerateToken(userID, username, email, role) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} + +// GenerateTokenDirect godoc +// @Summary Generate JWT token directly +// @Description Generate a JWT token directly with provided data +// @Tags Token +// @Accept json +// @Produce json +// @Param token body map[string]interface{} true "Token generation data" +// @Success 200 {object} models.TokenResponse +// @Failure 400 {object} map[string]string "Bad request" +// @Router /api/v1/token/generate-direct [post] +func (h *TokenHandler) GenerateTokenDirect(c *gin.Context) { + var req map[string]interface{} + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Extract user data from request + userID, ok := req["user_id"].(string) + if !ok || userID == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "user_id is required"}) 
+ return + } + + username, _ := req["username"].(string) + email, _ := req["email"].(string) + role, _ := req["role"].(string) + + // Generate token directly + tokenResponse, err := h.authService.GenerateTokenDirect(userID, username, email, role) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, tokenResponse) +} diff --git a/internal/handlers/auth/token.go b/internal/handlers/auth/token.go deleted file mode 100644 index 02383c7..0000000 --- a/internal/handlers/auth/token.go +++ /dev/null @@ -1,95 +0,0 @@ -package handlers - -import ( - models "api-service/internal/models/auth" - services "api-service/internal/services/auth" - "net/http" - - "github.com/gin-gonic/gin" -) - -// TokenHandler handles token generation endpoints -type TokenHandler struct { - authService *services.AuthService -} - -// NewTokenHandler creates a new token handler -func NewTokenHandler(authService *services.AuthService) *TokenHandler { - return &TokenHandler{ - authService: authService, - } -} - -// GenerateToken godoc -// @Summary Generate JWT token -// @Description Generate a JWT token for a user -// @Tags Token -// @Accept json -// @Produce json -// @Param token body models.LoginRequest true "User credentials" -// @Success 200 {object} models.TokenResponse -// @Failure 400 {object} map[string]string "Bad request" -// @Failure 401 {object} map[string]string "Unauthorized" -// @Router /api/v1/token/generate [post] -func (h *TokenHandler) GenerateToken(c *gin.Context) { - var loginReq models.LoginRequest - - // Bind JSON request - if err := c.ShouldBindJSON(&loginReq); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - - // Generate token - tokenResponse, err := h.authService.Login(loginReq.Username, loginReq.Password) - if err != nil { - c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) - return - } - - c.JSON(http.StatusOK, tokenResponse) -} - -// 
GenerateTokenDirect godoc -// @Summary Generate token directly -// @Description Generate a JWT token directly without password verification (for testing) -// @Tags Token -// @Accept json -// @Produce json -// @Param user body map[string]string true "User info" -// @Success 200 {object} models.TokenResponse -// @Failure 400 {object} map[string]string "Bad request" -// @Router /api/v1/token/generate-direct [post] -func (h *TokenHandler) GenerateTokenDirect(c *gin.Context) { - var req struct { - Username string `json:"username" binding:"required"` - Email string `json:"email" binding:"required"` - Role string `json:"role" binding:"required"` - } - - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - - // Create a temporary user for token generation - user := &models.User{ - ID: "temp-" + req.Username, - Username: req.Username, - Email: req.Email, - Role: req.Role, - } - - // Generate token directly - token, err := h.authService.GenerateTokenForUser(user) - if err != nil { - c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) - return - } - - c.JSON(http.StatusOK, models.TokenResponse{ - AccessToken: token, - TokenType: "Bearer", - ExpiresIn: 3600, - }) -} diff --git a/internal/handlers/retribusi/retribusi.go b/internal/handlers/retribusi/retribusi.go index b5e9a94..5dd0362 100644 --- a/internal/handlers/retribusi/retribusi.go +++ b/internal/handlers/retribusi/retribusi.go @@ -5,15 +5,13 @@ import ( "api-service/internal/database" models "api-service/internal/models" "api-service/internal/models/retribusi" - utils "api-service/internal/utils/filters" - "api-service/internal/utils/validation" + queryUtils "api-service/internal/utils/query" "api-service/pkg/logger" "context" "database/sql" "fmt" "net/http" "strconv" - "strings" "sync" "time" @@ -22,19 +20,21 @@ import ( "github.com/google/uuid" ) +// ============================================================================= +// 
GLOBAL INITIALIZATION & VALIDATION +// ============================================================================= + var ( db database.Service once sync.Once validate *validator.Validate ) -// Initialize the database connection and validator +// Initialize the database connection and validator once func init() { once.Do(func() { db = database.New(config.LoadConfig()) validate = validator.New() - - // Register custom validations if needed validate.RegisterValidation("retribusi_status", validateRetribusiStatus) if db == nil { @@ -48,18 +48,38 @@ func validateRetribusiStatus(fl validator.FieldLevel) bool { return models.IsValidStatus(fl.Field().String()) } +// ============================================================================= +// RETRIBUSI HANDLER STRUCT +// ============================================================================= + // RetribusiHandler handles retribusi services type RetribusiHandler struct { - db database.Service + db database.Service + queryBuilder *queryUtils.QueryBuilder } -// NewRetribusiHandler creates a new RetribusiHandler +// NewRetribusiHandler creates a new RetribusiHandler with a pre-configured QueryBuilder func NewRetribusiHandler() *RetribusiHandler { + // PERUBAHAN: Inisialisasi QueryBuilder dengan daftar kolom yang diizinkan untuk keamanan. + queryBuilder := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL). 
+ SetAllowedColumns([]string{ + "id", "status", "sort", "user_created", "date_created", + "user_updated", "date_updated", "Jenis", "Pelayanan", + "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", + "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", + "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }) + return &RetribusiHandler{ - db: db, + db: db, + queryBuilder: queryBuilder, } } +// ============================================================================= +// HANDLER ENDPOINTS +// ============================================================================= + // GetRetribusi godoc // @Summary Get retribusi with pagination and optional aggregation // @Description Returns a paginated list of retribusis with optional summary statistics @@ -78,100 +98,85 @@ func NewRetribusiHandler() *RetribusiHandler { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis [get] func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { - // Parse pagination parameters - limit, offset, err := h.parsePaginationParams(c) - if err != nil { - h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest) - return + // PERUBAHAN: Gunakan fungsi inti fetchRetribusisDynamic untuk semua logika pengambilan data. + // Kita hanya perlu membangun DynamicQuery dari parameter sederhana. 
+ query := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{{Expression: "*"}}, + Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, } - // Parse filter parameters - filter := h.parseFilterParams(c) - includeAggregation := c.Query("include_summary") == "true" + // Parse pagination + if limit, err := strconv.Atoi(c.DefaultQuery("limit", "10")); err == nil && limit > 0 && limit <= 100 { + query.Limit = limit + } + if offset, err := strconv.Atoi(c.DefaultQuery("offset", "0")); err == nil && offset >= 0 { + query.Offset = offset + } - // Get database connection + // Parse simple filters + var filters []queryUtils.DynamicFilter + if status := c.Query("status"); status != "" && models.IsValidStatus(status) { + filters = append(filters, queryUtils.DynamicFilter{Column: "status", Operator: queryUtils.OpEqual, Value: status}) + } + if jenis := c.Query("jenis"); jenis != "" { + filters = append(filters, queryUtils.DynamicFilter{Column: "Jenis", Operator: queryUtils.OpILike, Value: "%" + jenis + "%"}) + } + if dinas := c.Query("dinas"); dinas != "" { + filters = append(filters, queryUtils.DynamicFilter{Column: "Dinas", Operator: queryUtils.OpILike, Value: "%" + dinas + "%"}) + } + if search := c.Query("search"); search != "" { + // Jika ada search, buat grup filter OR + searchFilters := []queryUtils.DynamicFilter{ + {Column: "Jenis", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "Pelayanan", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "Dinas", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "Kode_tarif", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "Uraian_1", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + } + query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: "OR"}) + } + + // Tambahkan filter lainnya (jika ada) sebagai grup AND + if len(filters) > 0 { + query.Filters = 
append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"}) + } + + // Eksekusi query inti dbConn, err := h.db.GetDB("postgres_satudata") if err != nil { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - - // Create context with timeout ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) defer cancel() - // Execute concurrent operations - var ( - retribusis []retribusi.Retribusi - total int - aggregateData *models.AggregateData - wg sync.WaitGroup - errChan = make(chan error, 3) - mu sync.Mutex - ) - - // Fetch total count - wg.Add(1) - go func() { - defer wg.Done() - if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil { - mu.Lock() - errChan <- fmt.Errorf("failed to get total count: %w", err) - mu.Unlock() - } - }() - - // Fetch main data - wg.Add(1) - go func() { - defer wg.Done() - result, err := h.fetchRetribusis(ctx, dbConn, filter, limit, offset) - mu.Lock() - if err != nil { - errChan <- fmt.Errorf("failed to fetch data: %w", err) - } else { - retribusis = result - } - mu.Unlock() - }() - - // Fetch aggregation data if requested - if includeAggregation { - wg.Add(1) - go func() { - defer wg.Done() - result, err := h.getAggregateData(ctx, dbConn, filter) - mu.Lock() - if err != nil { - errChan <- fmt.Errorf("failed to get aggregate data: %w", err) - } else { - aggregateData = result - } - mu.Unlock() - }() + retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return } - // Wait for all goroutines - wg.Wait() - close(errChan) - - // Check for errors - for err := range errChan { + // Jika diminta, ambil data agregasi + var aggregateData *models.AggregateData + if c.Query("include_summary") == "true" { + // PERUBAHAN: parseFilterParams dihapus, kita gunakan filter yang sudah dibuat. 
+ aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters) if err != nil { - h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError) + h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError) return } } - // Build response - meta := h.calculateMeta(limit, offset, total) + // Bangun respons + meta := h.calculateMeta(query.Limit, query.Offset, total) response := retribusi.RetribusiGetResponse{ Message: "Data retribusi berhasil diambil", Data: retribusis, Meta: meta, } - if includeAggregation && aggregateData != nil { + if aggregateData != nil { response.Summary = aggregateData } @@ -192,8 +197,6 @@ func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { // @Router /api/v1/retribusi/{id} [get] func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { id := c.Param("id") - - // Validate UUID format if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) return @@ -204,11 +207,30 @@ func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) defer cancel() - dataretribusi, err := h.getRetribusiByID(ctx, dbConn, id) + dynamicQuery := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{{Expression: "*"}}, + Filters: []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }}, + Limit: 1, + } + + query, args, err := h.queryBuilder.BuildQuery(dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to build query", err, http.StatusInternalServerError) + return + } + + row := dbConn.QueryRowContext(ctx, query, args...) 
+ dataretribusi, err := h.scanRetribusiFromRow(row) if err != nil { if err == sql.ErrNoRows { h.respondError(c, "Retribusi not found", err, http.StatusNotFound) @@ -220,9 +242,8 @@ func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { response := retribusi.RetribusiGetByIDResponse{ Message: "Retribusi details retrieved successfully", - Data: dataretribusi, + Data: &dataretribusi, } - c.JSON(http.StatusOK, response) } @@ -242,301 +263,39 @@ func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis/dynamic [get] func (h *RetribusiHandler) GetRetribusiDynamic(c *gin.Context) { - // Parse query parameters - parser := utils.NewQueryParser().SetLimits(10, 100) - dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) + parser := queryUtils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query(), "data_retribusi") if err != nil { h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) return } - // Get database connection + // Add default filter to exclude deleted records + dynamicQuery.Filters = append([]queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}}, + LogicOp: "AND", + }}, dynamicQuery.Filters...) 
+ dbConn, err := h.db.GetDB("postgres_satudata") if err != nil { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - - // Create context with timeout ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) defer cancel() - // Execute query with dynamic filtering retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, dynamicQuery) if err != nil { h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) return } - // Build response meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) response := retribusi.RetribusiGetResponse{ Message: "Data retribusi berhasil diambil", Data: retribusis, Meta: meta, } - - c.JSON(http.StatusOK, response) -} - -// fetchRetribusisDynamic executes dynamic query -func (h *RetribusiHandler) fetchRetribusisDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]retribusi.Retribusi, int, error) { - // Setup query builder - countBuilder := utils.NewQueryBuilder("data_retribusi"). - SetColumnMapping(map[string]string{ - "jenis": "Jenis", - "pelayanan": "Pelayanan", - "dinas": "Dinas", - "kelompok_obyek": "Kelompok_obyek", - "Kode_tarif": "Kode_tarif", - "kode_tarif": "Kode_tarif", - "tarif": "Tarif", - "satuan": "Satuan", - "tarif_overtime": "Tarif_overtime", - "satuan_overtime": "Satuan_overtime", - "rekening_pokok": "Rekening_pokok", - "rekening_denda": "Rekening_denda", - "uraian_1": "Uraian_1", - "uraian_2": "Uraian_2", - "uraian_3": "Uraian_3", - }). - SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "Jenis", "Pelayanan", - "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", - "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", - "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", - }) - - mainBuilder := utils.NewQueryBuilder("data_retribusi"). 
- SetColumnMapping(map[string]string{ - "jenis": "Jenis", - "pelayanan": "Pelayanan", - "dinas": "Dinas", - "kelompok_obyek": "Kelompok_obyek", - "Kode_tarif": "Kode_tarif", - "kode_tarif": "Kode_tarif", - "tarif": "Tarif", - "satuan": "Satuan", - "tarif_overtime": "Tarif_overtime", - "satuan_overtime": "Satuan_overtime", - "rekening_pokok": "Rekening_pokok", - "rekening_denda": "Rekening_denda", - "uraian_1": "Uraian_1", - "uraian_2": "Uraian_2", - "uraian_3": "Uraian_3", - }). - SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "Jenis", "Pelayanan", - "Dinas", "Kelompok_obyek", "Kode_tarif", "Tarif", "Satuan", - "Tarif_overtime", "Satuan_overtime", "Rekening_pokok", - "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", - }) - - // Add default filter to exclude deleted records - if len(query.Filters) > 0 { - query.Filters = append([]utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }}, query.Filters...) - } else { - query.Filters = []utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }} - } - - // Execute queries sequentially to avoid race conditions - var total int - var retribusis []retribusi.Retribusi - - // 1. Get total count first - countQuery := query - countQuery.Limit = 0 - countQuery.Offset = 0 - - countSQL, countArgs, err := countBuilder.BuildCountQuery(countQuery) - if err != nil { - return nil, 0, fmt.Errorf("failed to build count query: %w", err) - } - - if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { - return nil, 0, fmt.Errorf("failed to get total count: %w", err) - } - - // 2. 
Get main data - mainSQL, mainArgs, err := mainBuilder.BuildQuery(query) - if err != nil { - return nil, 0, fmt.Errorf("failed to build main query: %w", err) - } - - rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) - if err != nil { - return nil, 0, fmt.Errorf("failed to execute main query: %w", err) - } - defer rows.Close() - - for rows.Next() { - retribusi, err := h.scanRetribusi(rows) - if err != nil { - return nil, 0, fmt.Errorf("failed to scan retribusi: %w", err) - } - retribusis = append(retribusis, retribusi) - } - - if err := rows.Err(); err != nil { - return nil, 0, fmt.Errorf("rows iteration error: %w", err) - } - - return retribusis, total, nil -} - -// SearchRetribusiAdvanced provides advanced search capabilities -func (h *RetribusiHandler) SearchRetribusiAdvanced(c *gin.Context) { - // Parse complex search parameters - searchQuery := c.Query("q") - if searchQuery == "" { - // If no search query provided, return all records with default sorting - query := utils.DynamicQuery{ - Fields: []string{"*"}, - Filters: []utils.FilterGroup{}, // Empty filters - fetchRetribusisDynamic will add default deleted filter - Sort: []utils.SortField{{ - Column: "date_created", - Order: "DESC", - }}, - Limit: 20, - Offset: 0, - } - - // Parse pagination if provided - if limit := c.Query("limit"); limit != "" { - if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { - query.Limit = l - } - } - - if offset := c.Query("offset"); offset != "" { - if o, err := strconv.Atoi(offset); err == nil && o >= 0 { - query.Offset = o - } - } - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute query to get all records - retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) - if err != nil { - 
h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) - return - } - - // Build response - meta := h.calculateMeta(query.Limit, query.Offset, total) - response := retribusi.RetribusiGetResponse{ - Message: "All records retrieved (no search query provided)", - Data: retribusis, - Meta: meta, - } - - c.JSON(http.StatusOK, response) - return - } - - // Build dynamic query for search - query := utils.DynamicQuery{ - Fields: []string{"*"}, - Filters: []utils.FilterGroup{{ - Filters: []utils.DynamicFilter{ - { - Column: "Jenis", - Operator: utils.OpContains, - Value: searchQuery, - LogicOp: "OR", - }, - { - Column: "Pelayanan", - Operator: utils.OpContains, - Value: searchQuery, - LogicOp: "OR", - }, - { - Column: "Dinas", - Operator: utils.OpContains, - Value: searchQuery, - LogicOp: "OR", - }, - { - Column: "Uraian_1", - Operator: utils.OpContains, - Value: searchQuery, - LogicOp: "OR", - }, - }, - LogicOp: "AND", - }}, - Sort: []utils.SortField{{ - Column: "date_created", - Order: "DESC", - }}, - Limit: 20, - Offset: 0, - } - - // Parse pagination if provided - if limit := c.Query("limit"); limit != "" { - if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { - query.Limit = l - } - } - - if offset := c.Query("offset"); offset != "" { - if o, err := strconv.Atoi(offset); err == nil && o >= 0 { - query.Offset = o - } - } - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute search - retribusis, total, err := h.fetchRetribusisDynamic(ctx, dbConn, query) - if err != nil { - h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) - return - } - - // Build response - meta := h.calculateMeta(query.Limit, query.Offset, total) - response := 
retribusi.RetribusiGetResponse{ - Message: fmt.Sprintf("Search results for '%s'", searchQuery), - Data: retribusis, - Meta: meta, - } - c.JSON(http.StatusOK, response) } @@ -553,13 +312,10 @@ func (h *RetribusiHandler) SearchRetribusiAdvanced(c *gin.Context) { // @Router /api/v1/retribusis [post] func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { var req retribusi.RetribusiCreateRequest - if err := c.ShouldBindJSON(&req); err != nil { h.respondError(c, "Invalid request body", err, http.StatusBadRequest) return } - - // Validate request if err := validate.Struct(&req); err != nil { h.respondError(c, "Validation failed", err, http.StatusBadRequest) return @@ -570,27 +326,41 @@ func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) defer cancel() - // Validate duplicate and daily submission - if err := h.validateRetribusiSubmission(ctx, dbConn, &req); err != nil { - h.respondError(c, "Validation failed", err, http.StatusBadRequest) + data := queryUtils.InsertData{ + Columns: []string{ + "id", "status", "date_created", "date_updated", + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }, + Values: []interface{}{ + uuid.New().String(), req.Status, time.Now(), time.Now(), + req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, + req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, + req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, + }, + } + returningCols := []string{ + "id", "status", "sort", "user_created", "date_created", "user_updated", "date_updated", + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + 
"Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + } + queryStr, args, err := h.queryBuilder.BuildInsertQuery("data_retribusi", data, returningCols...) + if err != nil { + h.logAndRespondError(c, "Failed to build insert query", err, http.StatusInternalServerError) return } - - dataretribusi, err := h.createRetribusi(ctx, dbConn, &req) + row := dbConn.QueryRowContext(ctx, queryStr, args...) + dataretribusi, err := h.scanRetribusiFromRow(row) if err != nil { h.logAndRespondError(c, "Failed to create retribusi", err, http.StatusInternalServerError) return } - - response := retribusi.RetribusiCreateResponse{ - Message: "Retribusi berhasil dibuat", - Data: dataretribusi, - } - + response := retribusi.RetribusiCreateResponse{Message: "Retribusi berhasil dibuat", Data: &dataretribusi} c.JSON(http.StatusCreated, response) } @@ -609,38 +379,59 @@ func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { // @Router /api/v1/retribusi/{id} [put] func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { id := c.Param("id") - - // Validate UUID format if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) return } - var req retribusi.RetribusiUpdateRequest if err := c.ShouldBindJSON(&req); err != nil { h.respondError(c, "Invalid request body", err, http.StatusBadRequest) return } - - // Set ID from path parameter req.ID = id - - // Validate request if err := validate.Struct(&req); err != nil { h.respondError(c, "Validation failed", err, http.StatusBadRequest) return } - dbConn, err := h.db.GetDB("postgres_satudata") if err != nil { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) defer cancel() - - dataretribusi, err := h.updateRetribusi(ctx, dbConn, &req) + updateData := queryUtils.UpdateData{ + Columns: []string{ + "status", "date_updated", "Jenis", "Pelayanan", "Dinas", 
"Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + }, + Values: []interface{}{ + req.Status, time.Now(), req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, + req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, + req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, + }, + } + filters := []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: req.ID}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }} + returningCols := []string{ + "id", "status", "sort", "user_created", "date_created", "user_updated", "date_updated", + "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", + "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", + "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3", + } + queryStr, args, err := h.queryBuilder.BuildUpdateQuery("data_retribusi", updateData, filters, returningCols...) + if err != nil { + h.logAndRespondError(c, "Failed to build update query", err, http.StatusInternalServerError) + return + } + row := dbConn.QueryRowContext(ctx, queryStr, args...) 
+ dataretribusi, err := h.scanRetribusiFromRow(row) if err != nil { if err == sql.ErrNoRows { h.respondError(c, "Retribusi not found", err, http.StatusNotFound) @@ -649,12 +440,7 @@ func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { } return } - - response := retribusi.RetribusiUpdateResponse{ - Message: "Retribusi berhasil diperbarui", - Data: dataretribusi, - } - + response := retribusi.RetribusiUpdateResponse{Message: "Retribusi berhasil diperbarui", Data: &dataretribusi} c.JSON(http.StatusOK, response) } @@ -672,37 +458,48 @@ func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { // @Router /api/v1/retribusi/{id} [delete] func (h *RetribusiHandler) DeleteRetribusi(c *gin.Context) { id := c.Param("id") - - // Validate UUID format if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) return } - dbConn, err := h.db.GetDB("postgres_satudata") if err != nil { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) defer cancel() - - err = h.deleteRetribusi(ctx, dbConn, id) + updateData := queryUtils.UpdateData{ + Columns: []string{"status", "date_updated"}, + Values: []interface{}{"deleted", time.Now()}, + } + filters := []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }} + queryStr, args, err := h.queryBuilder.BuildUpdateQuery("data_retribusi", updateData, filters) if err != nil { - if err == sql.ErrNoRows { - h.respondError(c, "Retribusi not found", err, http.StatusNotFound) - } else { - h.logAndRespondError(c, "Failed to delete retribusi", err, http.StatusInternalServerError) - } + h.logAndRespondError(c, "Failed to build delete query", err, http.StatusInternalServerError) return } - - response := 
retribusi.RetribusiDeleteResponse{ - Message: "Retribusi berhasil dihapus", - ID: id, + result, err := dbConn.ExecContext(ctx, queryStr, args...) + if err != nil { + h.logAndRespondError(c, "Failed to delete retribusi", err, http.StatusInternalServerError) + return } - + rowsAffected, err := result.RowsAffected() + if err != nil { + h.logAndRespondError(c, "Failed to get affected rows", err, http.StatusInternalServerError) + return + } + if rowsAffected == 0 { + h.respondError(c, "Retribusi not found", sql.ErrNoRows, http.StatusNotFound) + return + } + response := retribusi.RetribusiDeleteResponse{Message: "Retribusi berhasil dihapus", ID: id} c.JSON(http.StatusOK, response) } @@ -722,12 +519,18 @@ func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) { h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) return } - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) defer cancel() - filter := h.parseFilterParams(c) - aggregateData, err := h.getAggregateData(ctx, dbConn, filter) + // PERUBAHAN: Kita tidak lagi parseFilterParams, kita bisa menggunakan QueryParser di sini juga jika perlu + // atau membangun filter secara manual seperti di GetRetribusi. + // Untuk contoh, kita asumsikan tidak ada filter selain default. 
+ filterGroups := []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}}, + LogicOp: "AND", + }} + + aggregateData, err := h.getAggregateData(ctx, dbConn, filterGroups) if err != nil { h.logAndRespondError(c, "Failed to get statistics", err, http.StatusInternalServerError) return @@ -739,346 +542,48 @@ func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) { }) } -// Get retribusi by ID -func (h *RetribusiHandler) getRetribusiByID(ctx context.Context, dbConn *sql.DB, id string) (*retribusi.Retribusi, error) { - query := ` - SELECT - id, status, sort, user_created, date_created, user_updated, date_updated, - "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", - "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", - "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" - FROM data_retribusi - WHERE id = $1 AND status != 'deleted'` +// ============================================================================= +// HELPER FUNCTIONS +// ============================================================================= - row := dbConn.QueryRowContext(ctx, query, id) - - var retribusi retribusi.Retribusi - err := row.Scan( - &retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated, - &retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated, - &retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek, - &retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime, - &retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda, - &retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3, - ) +// fetchRetribusisDynamic executes a dynamic query to get retribusi data and total count +func (h *RetribusiHandler) fetchRetribusisDynamic(ctx context.Context, dbConn *sql.DB, query queryUtils.DynamicQuery) ([]retribusi.Retribusi, int, error) { + var total int + var retribusis 
[]retribusi.Retribusi + // 1. Get total count + countQuery := query + countQuery.Limit = 0 + countQuery.Offset = 0 + countQueryStr, countArgs, err := h.queryBuilder.BuildCountQuery(countQuery) if err != nil { - return nil, err + return nil, 0, fmt.Errorf("failed to build count query: %w", err) + } + if err := dbConn.QueryRowContext(ctx, countQueryStr, countArgs...).Scan(&total); err != nil { + return nil, 0, fmt.Errorf("failed to get total count: %w", err) } - return &retribusi, nil -} - -// Create retribusi -func (h *RetribusiHandler) createRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) (*retribusi.Retribusi, error) { - id := uuid.New().String() - now := time.Now() - - query := ` - INSERT INTO data_retribusi ( - id, status, date_created, date_updated, - "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", - "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", - "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) - RETURNING - id, status, sort, user_created, date_created, user_updated, date_updated, - "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", - "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", - "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"` - - row := dbConn.QueryRowContext(ctx, query, - id, req.Status, now, now, - req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, - req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, - req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, - ) - - var retribusi retribusi.Retribusi - err := row.Scan( - &retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated, - &retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated, - &retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek, - &retribusi.KodeTarif, 
&retribusi.Tarif, &retribusi.Satuan, &retribusi.TarifOvertime, - &retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda, - &retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3, - ) - + // 2. Get main data + mainQuery, mainArgs, err := h.queryBuilder.BuildQuery(query) if err != nil { - return nil, fmt.Errorf("failed to create retribusi: %w", err) + return nil, 0, fmt.Errorf("failed to build main query: %w", err) } - - return &retribusi, nil -} - -// Update retribusi -func (h *RetribusiHandler) updateRetribusi(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiUpdateRequest) (*retribusi.Retribusi, error) { - now := time.Now() - - query := ` - UPDATE data_retribusi SET - status = $2, date_updated = $3, - "Jenis" = $4, "Pelayanan" = $5, "Dinas" = $6, "Kelompok_obyek" = $7, "Kode_tarif" = $8, - "Tarif" = $9, "Satuan" = $10, "Tarif_overtime" = $11, "Satuan_overtime" = $12, - "Rekening_pokok" = $13, "Rekening_denda" = $14, "Uraian_1" = $15, "Uraian_2" = $16, "Uraian_3" = $17 - WHERE id = $1 AND status != 'deleted' - RETURNING - id, status, sort, user_created, date_created, user_updated, date_updated, - "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", - "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", - "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3"` - - row := dbConn.QueryRowContext(ctx, query, - req.ID, req.Status, now, - req.Jenis, req.Pelayanan, req.Dinas, req.KelompokObyek, req.KodeTarif, - req.Tarif, req.Satuan, req.TarifOvertime, req.SatuanOvertime, - req.RekeningPokok, req.RekeningDenda, req.Uraian1, req.Uraian2, req.Uraian3, - ) - - var retribusi retribusi.Retribusi - err := row.Scan( - &retribusi.ID, &retribusi.Status, &retribusi.Sort, &retribusi.UserCreated, - &retribusi.DateCreated, &retribusi.UserUpdated, &retribusi.DateUpdated, - &retribusi.Jenis, &retribusi.Pelayanan, &retribusi.Dinas, &retribusi.KelompokObyek, - &retribusi.KodeTarif, &retribusi.Tarif, &retribusi.Satuan, 
&retribusi.TarifOvertime, - &retribusi.SatuanOvertime, &retribusi.RekeningPokok, &retribusi.RekeningDenda, - &retribusi.Uraian1, &retribusi.Uraian2, &retribusi.Uraian3, - ) - + retribusis, err = h.fetchWithSQL(ctx, dbConn, mainQuery, mainArgs) if err != nil { - return nil, fmt.Errorf("failed to update retribusi: %w", err) + return nil, 0, fmt.Errorf("failed to execute main query: %w", err) } - return &retribusi, nil + return retribusis, total, nil } -// Soft delete retribusi -func (h *RetribusiHandler) deleteRetribusi(ctx context.Context, dbConn *sql.DB, id string) error { - now := time.Now() - - query := `UPDATE data_retribusi SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'` - - result, err := dbConn.ExecContext(ctx, query, id, now) - if err != nil { - return fmt.Errorf("failed to delete retribusi: %w", err) - } - - rowsAffected, err := result.RowsAffected() - if err != nil { - return fmt.Errorf("failed to get affected rows: %w", err) - } - - if rowsAffected == 0 { - return sql.ErrNoRows - } - - return nil -} - -// Enhanced error handling -func (h *RetribusiHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { - logger.Error(message, map[string]interface{}{ - "error": err.Error(), - "status_code": statusCode, - }) - h.respondError(c, message, err, statusCode) -} - -func (h *RetribusiHandler) respondError(c *gin.Context, message string, err error, statusCode int) { - errorMessage := message - if gin.Mode() == gin.ReleaseMode { - errorMessage = "Internal server error" - } - - c.JSON(statusCode, models.ErrorResponse{ - Error: errorMessage, - Code: statusCode, - Message: err.Error(), - Timestamp: time.Now(), - }) -} - -// Parse pagination parameters dengan validation yang lebih ketat -func (h *RetribusiHandler) parsePaginationParams(c *gin.Context) (int, int, error) { - limit := 10 // Default limit - offset := 0 // Default offset - - if limitStr := c.Query("limit"); limitStr != "" { - parsedLimit, err 
:= strconv.Atoi(limitStr) - if err != nil { - return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) - } - if parsedLimit <= 0 { - return 0, 0, fmt.Errorf("limit must be greater than 0") - } - if parsedLimit > 100 { - return 0, 0, fmt.Errorf("limit cannot exceed 100") - } - limit = parsedLimit - } - - if offsetStr := c.Query("offset"); offsetStr != "" { - parsedOffset, err := strconv.Atoi(offsetStr) - if err != nil { - return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) - } - if parsedOffset < 0 { - return 0, 0, fmt.Errorf("offset cannot be negative") - } - offset = parsedOffset - } - - logger.Debug("Pagination parameters", map[string]interface{}{ - "limit": limit, - "offset": offset, - }) - return limit, offset, nil -} - -// Build WHERE clause dengan filter parameters -func (h *RetribusiHandler) buildWhereClause(filter retribusi.RetribusiFilter) (string, []interface{}) { - conditions := []string{"status != 'deleted'"} - args := []interface{}{} - paramCount := 1 - - if filter.Status != nil { - conditions = append(conditions, fmt.Sprintf("status = $%d", paramCount)) - args = append(args, *filter.Status) - paramCount++ - } - - if filter.Jenis != nil { - conditions = append(conditions, fmt.Sprintf(`"Jenis" ILIKE $%d`, paramCount)) - args = append(args, "%"+*filter.Jenis+"%") - paramCount++ - } - - if filter.Dinas != nil { - conditions = append(conditions, fmt.Sprintf(`"Dinas" ILIKE $%d`, paramCount)) - args = append(args, "%"+*filter.Dinas+"%") - paramCount++ - } - - if filter.KelompokObyek != nil { - conditions = append(conditions, fmt.Sprintf(`"Kelompok_obyek" ILIKE $%d`, paramCount)) - args = append(args, "%"+*filter.KelompokObyek+"%") - paramCount++ - } - - if filter.Search != nil { - searchCondition := fmt.Sprintf(`( - "Jenis" ILIKE $%d OR - "Pelayanan" ILIKE $%d OR - "Dinas" ILIKE $%d OR - "Kode_tarif" ILIKE $%d OR - "Uraian_1" ILIKE $%d OR - "Uraian_2" ILIKE $%d OR - "Uraian_3" ILIKE $%d - )`, paramCount, paramCount, paramCount, 
paramCount, paramCount, paramCount, paramCount) - conditions = append(conditions, searchCondition) - searchTerm := "%" + *filter.Search + "%" - args = append(args, searchTerm) - paramCount++ - } - - if filter.DateFrom != nil { - conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) - args = append(args, *filter.DateFrom) - paramCount++ - } - - if filter.DateTo != nil { - conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) - args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) // End of day - paramCount++ - } - - return strings.Join(conditions, " AND "), args -} - -// Optimized scanning function yang menggunakan sql.Null* types langsung -func (h *RetribusiHandler) scanRetribusi(rows *sql.Rows) (retribusi.Retribusi, error) { - var retribusi retribusi.Retribusi - - return retribusi, rows.Scan( - &retribusi.ID, - &retribusi.Status, - &retribusi.Sort, - &retribusi.UserCreated, - &retribusi.DateCreated, - &retribusi.UserUpdated, - &retribusi.DateUpdated, - &retribusi.Jenis, - &retribusi.Pelayanan, - &retribusi.Dinas, - &retribusi.KelompokObyek, - &retribusi.KodeTarif, - &retribusi.Tarif, - &retribusi.Satuan, - &retribusi.TarifOvertime, - &retribusi.SatuanOvertime, - &retribusi.RekeningPokok, - &retribusi.RekeningDenda, - &retribusi.Uraian1, - &retribusi.Uraian2, - &retribusi.Uraian3, - ) -} - -// Parse filter parameters dari query string -func (h *RetribusiHandler) parseFilterParams(c *gin.Context) retribusi.RetribusiFilter { - filter := retribusi.RetribusiFilter{} - - if status := c.Query("status"); status != "" { - if models.IsValidStatus(status) { - filter.Status = &status - } - } - - if jenis := c.Query("jenis"); jenis != "" { - filter.Jenis = &jenis - } - - if dinas := c.Query("dinas"); dinas != "" { - filter.Dinas = &dinas - } - - if kelompokObyek := c.Query("kelompok_obyek"); kelompokObyek != "" { - filter.KelompokObyek = &kelompokObyek - } - - if search := c.Query("search"); search != "" { - 
filter.Search = &search - } - - // Parse date filters - if dateFromStr := c.Query("date_from"); dateFromStr != "" { - if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { - filter.DateFrom = &dateFrom - } - } - - if dateToStr := c.Query("date_to"); dateToStr != "" { - if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { - filter.DateTo = &dateTo - } - } - - return filter -} - -// Get comprehensive aggregate data dengan filter support -func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter) (*models.AggregateData, error) { +// PERUBAHAN: Fungsi agregasi sekarang sepenuhnya menggunakan QueryBuilder dan menghilangkan SQL manual. +func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, filterGroups []queryUtils.FilterGroup) (*models.AggregateData, error) { aggregate := &models.AggregateData{ ByStatus: make(map[string]int), ByDinas: make(map[string]int), ByJenis: make(map[string]int), } - // Build where clause untuk filter - whereClause, args := h.buildWhereClause(filter) - - // Use concurrent execution untuk performance var wg sync.WaitGroup var mu sync.Mutex errChan := make(chan error, 4) @@ -1087,20 +592,22 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, wg.Add(1) go func() { defer wg.Done() - statusQuery := fmt.Sprintf(` - SELECT status, COUNT(*) - FROM data_retribusi - WHERE %s - GROUP BY status - ORDER BY status`, whereClause) - - rows, err := dbConn.QueryContext(ctx, statusQuery, args...) + query := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{ + {Expression: "status"}, + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: filterGroups, + GroupBy: []string{"status"}, + } + sql, args, _ := h.queryBuilder.BuildQuery(query) + rows, err := dbConn.QueryContext(ctx, sql, args...) 
if err != nil { errChan <- fmt.Errorf("status query failed: %w", err) return } defer rows.Close() - mu.Lock() for rows.Next() { var status string @@ -1121,31 +628,36 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, } } mu.Unlock() - - if err := rows.Err(); err != nil { - errChan <- fmt.Errorf("status iteration error: %w", err) - } }() // 2. Count by Dinas wg.Add(1) go func() { defer wg.Done() - dinasQuery := fmt.Sprintf(` - SELECT COALESCE("Dinas", 'Unknown') as dinas, COUNT(*) - FROM data_retribusi - WHERE %s AND "Dinas" IS NOT NULL AND TRIM("Dinas") != '' - GROUP BY "Dinas" - ORDER BY COUNT(*) DESC - LIMIT 10`, whereClause) - - rows, err := dbConn.QueryContext(ctx, dinasQuery, args...) + query := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{ + {Expression: `COALESCE("Dinas", 'Unknown')`, Alias: "dinas"}, + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: append(filterGroups, queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "Dinas", Operator: queryUtils.OpNotNull}, + {Column: "Dinas", Operator: queryUtils.OpNotEqual, Value: ""}, + }, + LogicOp: "AND", + }), + GroupBy: []string{`COALESCE("Dinas", 'Unknown')`}, + Sort: []queryUtils.SortField{{Column: "count", Order: "DESC"}}, + Limit: 10, + } + sql, args, _ := h.queryBuilder.BuildQuery(query) + rows, err := dbConn.QueryContext(ctx, sql, args...) if err != nil { errChan <- fmt.Errorf("dinas query failed: %w", err) return } defer rows.Close() - mu.Lock() for rows.Next() { var dinas string @@ -1158,31 +670,36 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, aggregate.ByDinas[dinas] = count } mu.Unlock() - - if err := rows.Err(); err != nil { - errChan <- fmt.Errorf("dinas iteration error: %w", err) - } }() // 3. 
Count by Jenis wg.Add(1) go func() { defer wg.Done() - jenisQuery := fmt.Sprintf(` - SELECT COALESCE("Jenis", 'Unknown') as jenis, COUNT(*) - FROM data_retribusi - WHERE %s AND "Jenis" IS NOT NULL AND TRIM("Jenis") != '' - GROUP BY "Jenis" - ORDER BY COUNT(*) DESC - LIMIT 10`, whereClause) - - rows, err := dbConn.QueryContext(ctx, jenisQuery, args...) + query := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{ + {Expression: `COALESCE("Jenis", 'Unknown')`, Alias: "jenis"}, + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: append(filterGroups, queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "Jenis", Operator: queryUtils.OpNotNull}, + {Column: "Jenis", Operator: queryUtils.OpNotEqual, Value: ""}, + }, + LogicOp: "AND", + }), + GroupBy: []string{`COALESCE("Jenis", 'Unknown')`}, + Sort: []queryUtils.SortField{{Column: "count", Order: "DESC"}}, + Limit: 10, + } + sql, args, _ := h.queryBuilder.BuildQuery(query) + rows, err := dbConn.QueryContext(ctx, sql, args...) if err != nil { errChan <- fmt.Errorf("jenis query failed: %w", err) return } defer rows.Close() - mu.Lock() for rows.Next() { var jenis string @@ -1195,41 +712,46 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, aggregate.ByJenis[jenis] = count } mu.Unlock() - - if err := rows.Err(); err != nil { - errChan <- fmt.Errorf("jenis iteration error: %w", err) - } }() - // 4. Get last updated time dan today statistics + // 4. 
Get last updated and today's stats wg.Add(1) go func() { defer wg.Done() - // Last updated - lastUpdatedQuery := fmt.Sprintf(` - SELECT MAX(date_updated) - FROM data_retribusi - WHERE %s AND date_updated IS NOT NULL`, whereClause) - + query1 := queryUtils.DynamicQuery{ + From: "data_retribusi", + Fields: []queryUtils.SelectField{{Expression: "MAX(date_updated)"}}, + Filters: filterGroups, + } + sql1, args1, _ := h.queryBuilder.BuildQuery(query1) var lastUpdated sql.NullTime - if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { + if err := dbConn.QueryRowContext(ctx, sql1, args1...).Scan(&lastUpdated); err != nil { errChan <- fmt.Errorf("last updated query failed: %w", err) return } - // Today statistics + // PERUBAHAN: Hari ini, kita bangun WHERE clause menggunakan QueryBuilder untuk keamanan, + // lalu sisipkan ke query manual untuk CASE statement. + whereQuery := queryUtils.DynamicQuery{From: "data_retribusi", Filters: filterGroups} + whereSQL, whereArgs, err := h.queryBuilder.BuildWhereClause(whereQuery.Filters) + if err != nil { + errChan <- fmt.Errorf("failed to build where clause for stats: %w", err) + return + } + today := time.Now().Format("2006-01-02") + // PERUBAHAN: Menggunakan fmt.Sprintf dengan placeholder yang benar untuk PostgreSQL ($1, $2) todayStatsQuery := fmt.Sprintf(` SELECT - SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, - SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today - FROM data_retribusi - WHERE %s`, len(args)+1, len(args)+1, len(args)+1, whereClause) + SUM(CASE WHEN DATE(date_created) = $1 THEN 1 ELSE 0 END) as created_today, + SUM(CASE WHEN DATE(date_updated) = $1 AND DATE(date_created) != $1 THEN 1 ELSE 0 END) as updated_today + FROM data_retribusi WHERE %s`, whereSQL) - todayArgs := append(args, today) + // Argumen pertama untuk 'today', sisanya untuk where clause + args := append([]interface{}{today}, 
whereArgs...) var createdToday, updatedToday int - if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { + if err := dbConn.QueryRowContext(ctx, todayStatsQuery, args...).Scan(&createdToday, &updatedToday); err != nil { errChan <- fmt.Errorf("today stats query failed: %w", err) return } @@ -1243,11 +765,9 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, mu.Unlock() }() - // Wait for all goroutines wg.Wait() close(errChan) - // Check for errors for err := range errChan { if err != nil { return nil, err @@ -1257,145 +777,80 @@ func (h *RetribusiHandler) getAggregateData(ctx context.Context, dbConn *sql.DB, return aggregate, nil } -// Get total count dengan filter support -func (h *RetribusiHandler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, total *int) error { - whereClause, args := h.buildWhereClause(filter) - countQuery := fmt.Sprintf(`SELECT COUNT(*) FROM data_retribusi WHERE %s`, whereClause) - - if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil { - return fmt.Errorf("total count query failed: %w", err) - } - - return nil -} - -// Enhanced fetchRetribusis dengan filter support -func (h *RetribusiHandler) fetchRetribusis(ctx context.Context, dbConn *sql.DB, filter retribusi.RetribusiFilter, limit, offset int) ([]retribusi.Retribusi, error) { - whereClause, args := h.buildWhereClause(filter) - - // Build the main query with pagination - query := fmt.Sprintf(` - SELECT - id, status, sort, user_created, date_created, user_updated, date_updated, - "Jenis", "Pelayanan", "Dinas", "Kelompok_obyek", "Kode_tarif", - "Tarif", "Satuan", "Tarif_overtime", "Satuan_overtime", - "Rekening_pokok", "Rekening_denda", "Uraian_1", "Uraian_2", "Uraian_3" - FROM data_retribusi - WHERE %s - ORDER BY date_created DESC NULLS LAST - LIMIT $%d OFFSET $%d`, - whereClause, len(args)+1, len(args)+2) - - // Add pagination 
parameters - args = append(args, limit, offset) - - rows, err := dbConn.QueryContext(ctx, query, args...) +// fetchWithSQL executes a query and scans the results into a slice of Retribusi +func (h *RetribusiHandler) fetchWithSQL(ctx context.Context, dbConn *sql.DB, sql string, args []interface{}) ([]retribusi.Retribusi, error) { + rows, err := dbConn.QueryContext(ctx, sql, args...) if err != nil { - return nil, fmt.Errorf("fetch retribusis query failed: %w", err) + return nil, err } defer rows.Close() - // Pre-allocate slice dengan kapasitas yang tepat - retribusis := make([]retribusi.Retribusi, 0, limit) - + var retribusis []retribusi.Retribusi for rows.Next() { retribusi, err := h.scanRetribusi(rows) if err != nil { - return nil, fmt.Errorf("scan retribusi failed: %w", err) + return nil, err } retribusis = append(retribusis, retribusi) } - - if err := rows.Err(); err != nil { - return nil, fmt.Errorf("rows iteration error: %w", err) - } - - logger.Info("Successfully fetched retribusis", map[string]interface{}{ - "count": len(retribusis), - "limit": limit, - "offset": offset, - }) - return retribusis, nil + return retribusis, rows.Err() } -// Calculate pagination metadata -func (h *RetribusiHandler) calculateMeta(limit, offset, total int) models.MetaResponse { - totalPages := 0 - currentPage := 1 +// scanRetribusi scans a single row from a sql.Rows object into a Retribusi struct +func (h *RetribusiHandler) scanRetribusi(rows *sql.Rows) (retribusi.Retribusi, error) { + var r retribusi.Retribusi + err := rows.Scan( + &r.ID, &r.Status, &r.Sort, &r.UserCreated, &r.DateCreated, &r.UserUpdated, &r.DateUpdated, + &r.Jenis, &r.Pelayanan, &r.Dinas, &r.KelompokObyek, &r.KodeTarif, + &r.Tarif, &r.Satuan, &r.TarifOvertime, &r.SatuanOvertime, + &r.RekeningPokok, &r.RekeningDenda, &r.Uraian1, &r.Uraian2, &r.Uraian3, + ) + return r, err +} +// scanRetribusiFromRow scans a single sql.Row object into a Retribusi struct +func (h *RetribusiHandler) scanRetribusiFromRow(row *sql.Row) 
(retribusi.Retribusi, error) { + var r retribusi.Retribusi + err := row.Scan( + &r.ID, &r.Status, &r.Sort, &r.UserCreated, &r.DateCreated, &r.UserUpdated, &r.DateUpdated, + &r.Jenis, &r.Pelayanan, &r.Dinas, &r.KelompokObyek, &r.KodeTarif, + &r.Tarif, &r.Satuan, &r.TarifOvertime, &r.SatuanOvertime, + &r.RekeningPokok, &r.RekeningDenda, &r.Uraian1, &r.Uraian2, &r.Uraian3, + ) + return r, err +} + +// logAndRespondError logs an error and sends a JSON response +func (h *RetribusiHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + logger.Error(message, map[string]interface{}{"error": err.Error(), "status_code": statusCode}) + h.respondError(c, message, err, statusCode) +} + +// respondError sends a standardized JSON error response +func (h *RetribusiHandler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + c.JSON(statusCode, models.ErrorResponse{Error: errorMessage, Code: statusCode, Message: err.Error(), Timestamp: time.Now()}) +} + +// calculateMeta creates pagination metadata +func (h *RetribusiHandler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages, currentPage := 0, 1 if limit > 0 { - totalPages = (total + limit - 1) / limit // Ceiling division + totalPages = (total + limit - 1) / limit currentPage = (offset / limit) + 1 } - return models.MetaResponse{ - Limit: limit, - Offset: offset, - Total: total, - TotalPages: totalPages, - CurrentPage: currentPage, - HasNext: offset+limit < total, - HasPrev: offset > 0, + Limit: limit, Offset: offset, Total: total, TotalPages: totalPages, + CurrentPage: currentPage, HasNext: offset+limit < total, HasPrev: offset > 0, } } -// validateRetribusiSubmission performs validation for duplicate entries and daily submission limits +// validateRetribusiSubmission can be used for custom business logic validation before creation/update func 
(h *RetribusiHandler) validateRetribusiSubmission(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { - // Import the validation utility - validator := validation.NewDuplicateValidator(dbConn) - - // Use default retribusi configuration - config := validation.DefaultRetribusiConfig() - - // Validate duplicate entries with active status for today - err := validator.ValidateDuplicate(ctx, config, "dummy_id") - if err != nil { - return fmt.Errorf("validation failed: %w", err) - } - - // Validate once per day submission - err = validator.ValidateOncePerDay(ctx, "data_retribusi", "id", "date_created", "daily_limit") - if err != nil { - return fmt.Errorf("daily submission limit exceeded: %w", err) - } - + // TODO: Implementasikan validasi duplikat atau logika bisnis lainnya di sini. + // Contoh: validasi bahwa KodeTarif belum ada. return nil } - -// Example usage of the validation utility with custom configuration -func (h *RetribusiHandler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *retribusi.RetribusiCreateRequest) error { - // Create validator instance - validator := validation.NewDuplicateValidator(dbConn) - - // Use custom configuration - config := validation.ValidationConfig{ - TableName: "data_retribusi", - IDColumn: "id", - StatusColumn: "status", - DateColumn: "date_created", - ActiveStatuses: []string{"active", "draft"}, - AdditionalFields: map[string]interface{}{ - "jenis": req.Jenis, - "dinas": req.Dinas, - }, - } - - // Validate with custom fields - fields := map[string]interface{}{ - "jenis": *req.Jenis, - "dinas": *req.Dinas, - } - - err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) - if err != nil { - return fmt.Errorf("custom validation failed: %w", err) - } - - return nil -} - -// GetLastSubmissionTime example -func (h *RetribusiHandler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) { - validator := 
validation.NewDuplicateValidator(dbConn) - return validator.GetLastSubmissionTime(ctx, "data_retribusi", "id", "date_created", identifier) -} diff --git a/internal/middleware/auth.go b/internal/middleware/auth.go new file mode 100644 index 0000000..5321fce --- /dev/null +++ b/internal/middleware/auth.go @@ -0,0 +1,305 @@ +package middleware + +import ( + "api-service/internal/config" + "api-service/internal/models/auth" + service "api-service/internal/services/auth" + "api-service/pkg/logger" + "errors" + "fmt" + "net/http" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "golang.org/x/time/rate" +) + +var ( + ErrInvalidToken = errors.New("invalid token") + ErrTokenExpired = errors.New("token expired") + ErrInvalidSignature = errors.New("invalid token signature") + ErrInvalidIssuer = errors.New("invalid token issuer") + ErrInvalidAudience = errors.New("invalid token audience") + ErrMissingClaims = errors.New("required claims missing") + ErrInvalidAuthHeader = errors.New("invalid authorization header format") + ErrMissingAuthHeader = errors.New("authorization header missing") +) + +// TokenCache interface for token caching +type TokenCache interface { + Get(tokenString string) (*auth.JWTClaims, bool) + Set(tokenString string, claims *auth.JWTClaims, expiration time.Duration) + Delete(tokenString string) +} + +// InMemoryTokenCache implements TokenCache with in-memory storage +type InMemoryTokenCache struct { + tokens map[string]cacheEntry + mu sync.RWMutex +} + +type cacheEntry struct { + claims *auth.JWTClaims + expiration time.Time +} + +func NewInMemoryTokenCache() *InMemoryTokenCache { + cache := &InMemoryTokenCache{ + tokens: make(map[string]cacheEntry), + } + + // Start cleanup goroutine + go cache.cleanup() + + return cache +} + +func (c *InMemoryTokenCache) Get(tokenString string) (*auth.JWTClaims, bool) { + c.mu.RLock() + defer c.mu.RUnlock() + + entry, exists := c.tokens[tokenString] + if !exists || time.Now().After(entry.expiration) { + 
return nil, false + } + + return entry.claims, true +} + +func (c *InMemoryTokenCache) Set(tokenString string, claims *auth.JWTClaims, expiration time.Duration) { + c.mu.Lock() + defer c.mu.Unlock() + + c.tokens[tokenString] = cacheEntry{ + claims: claims, + expiration: time.Now().Add(expiration), + } +} + +func (c *InMemoryTokenCache) Delete(tokenString string) { + c.mu.Lock() + defer c.mu.Unlock() + + delete(c.tokens, tokenString) +} + +func (c *InMemoryTokenCache) cleanup() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + + for range ticker.C { + c.mu.Lock() + now := time.Now() + for token, entry := range c.tokens { + if now.After(entry.expiration) { + delete(c.tokens, token) + } + } + c.mu.Unlock() + } +} + +// AuthMiddleware provides authentication with rate limiting and caching +type AuthMiddleware struct { + providers []AuthProvider + tokenCache TokenCache + rateLimiter *rate.Limiter + config *config.Config +} + +func NewAuthMiddleware( + cfg *config.Config, + authService *service.AuthService, + tokenCache TokenCache, +) *AuthMiddleware { + factory := NewProviderFactory(authService, cfg) + providers := factory.CreateProviders() + + // Rate limit: 10 requests per second with burst of 20 + limiter := rate.NewLimiter(10, 20) + + // Use default cache if none provided + if tokenCache == nil { + tokenCache = NewInMemoryTokenCache() + } + + return &AuthMiddleware{ + providers: providers, + tokenCache: tokenCache, + rateLimiter: limiter, + config: cfg, + } +} + +// RequireAuth enforces authentication +func (m *AuthMiddleware) RequireAuth() gin.HandlerFunc { + return m.authenticate(false) +} + +// OptionalAuth allows both authenticated and unauthenticated requests +func (m *AuthMiddleware) OptionalAuth() gin.HandlerFunc { + return m.authenticate(true) +} + +// authenticate is the core authentication logic +func (m *AuthMiddleware) authenticate(optional bool) gin.HandlerFunc { + return func(c *gin.Context) { + reqLogger := 
logger.Default().WithService("auth-middleware") + reqLogger.Info("Starting authentication", map[string]interface{}{ + "path": c.Request.URL.Path, + "optional": optional, + }) + + // Apply rate limiting + if !m.rateLimiter.Allow() { + reqLogger.Warn("Rate limit exceeded") + c.AbortWithStatusJSON(http.StatusTooManyRequests, gin.H{ + "error": "rate limit exceeded", + }) + return + } + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + if optional { + c.Next() + return + } + + reqLogger.Warn("Authorization header missing") + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{ + "error": ErrMissingAuthHeader.Error(), + }) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + if optional { + c.Next() + return + } + + reqLogger.Warn("Invalid authorization header format") + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{ + "error": ErrInvalidAuthHeader.Error(), + }) + return + } + + tokenString := parts[1] + + // Check cache first + if claims, found := m.tokenCache.Get(tokenString); found { + reqLogger.Info("Token retrieved from cache", map[string]interface{}{ + "user_id": claims.UserID, + }) + + m.setUserInfo(c, claims, "cache") + c.Next() + return + } + + // Try each provider until one succeeds + var validatedClaims *auth.JWTClaims + var err error + var providerName string + var providerErrors []string + + for _, provider := range m.providers { + providerLog := reqLogger.WithField("provider", provider.Name()) + providerLog.Info("Trying provider") + + validatedClaims, err = provider.ValidateToken(tokenString) + if err == nil { + providerName = provider.Name() + providerLog.Info("Authentication successful", map[string]interface{}{ + "user_id": validatedClaims.UserID, + }) + break + } + + providerLog.Warn("Provider validation failed", map[string]interface{}{ + "error": err.Error(), + }) + providerErrors = append(providerErrors, fmt.Sprintf("provider %s: %v", provider.Name(), 
err)) + } + + if err != nil { + if optional { + c.Next() + return + } + + reqLogger.Error("All providers failed", map[string]interface{}{ + "errors": strings.Join(providerErrors, "; "), + }) + + // Return specific error message based on the error type + errorMessage := "Token tidak valid" + if errors.Is(err, ErrTokenExpired) { + errorMessage = "Token telah kadaluarsa" + } else if errors.Is(err, ErrInvalidSignature) { + errorMessage = "Signature token tidak valid" + } else if errors.Is(err, ErrInvalidIssuer) { + errorMessage = "Issuer token tidak valid" + } else if errors.Is(err, ErrInvalidAudience) { + errorMessage = "Audience token tidak valid" + } + + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{ + "error": errorMessage, + "details": strings.Join(providerErrors, "; "), + }) + return + } + + // Cache the validated token + m.tokenCache.Set(tokenString, validatedClaims, 5*time.Minute) + + // Set user info in context + m.setUserInfo(c, validatedClaims, providerName) + c.Next() + } +} + +// setUserInfo sets user information in the Gin context +func (m *AuthMiddleware) setUserInfo(c *gin.Context, claims *auth.JWTClaims, providerName string) { + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + c.Set("auth_provider", providerName) +} + +// RequireRole creates a middleware that requires a specific role +func (m *AuthMiddleware) RequireRole(requiredRole string) gin.HandlerFunc { + return func(c *gin.Context) { + role, exists := c.Get("role") + if !exists { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{ + "error": "user role not found", + }) + return + } + + userRole, ok := role.(string) + if !ok { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{ + "error": "invalid role format", + }) + return + } + + if userRole != requiredRole { + c.AbortWithStatusJSON(http.StatusForbidden, gin.H{ + "error": fmt.Sprintf("requires %s role", requiredRole), + }) + return + } + + 
c.Next() + } +} diff --git a/internal/middleware/auth_middleware.go b/internal/middleware/auth_middleware.go deleted file mode 100644 index 1d3969c..0000000 --- a/internal/middleware/auth_middleware.go +++ /dev/null @@ -1,59 +0,0 @@ -package middleware - -import ( - "fmt" - "net/http" - - "api-service/internal/config" - - "github.com/gin-gonic/gin" -) - -// ConfigurableAuthMiddleware provides flexible authentication based on configuration -func ConfigurableAuthMiddleware(cfg *config.Config) gin.HandlerFunc { - return func(c *gin.Context) { - // Skip authentication for development/testing if explicitly disabled - if !cfg.Keycloak.Enabled { - fmt.Println("Authentication is disabled - allowing all requests") - c.Next() - return - } - - // Use Keycloak authentication when enabled - AuthMiddleware()(c) - } -} - -// StrictAuthMiddleware enforces authentication regardless of Keycloak.Enabled setting -func StrictAuthMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - if appConfig == nil { - fmt.Println("AuthMiddleware: Config not initialized") - c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) - return - } - - // Always enforce authentication - AuthMiddleware()(c) - } -} - -// OptionalKeycloakAuthMiddleware allows requests but adds authentication info if available -func OptionalKeycloakAuthMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - if appConfig == nil || !appConfig.Keycloak.Enabled { - c.Next() - return - } - - authHeader := c.GetHeader("Authorization") - if authHeader == "" { - // No token provided, but continue - c.Next() - return - } - - // Try to validate token, but don't fail if invalid - AuthMiddleware()(c) - } -} diff --git a/internal/middleware/error_handler.go b/internal/middleware/error_handler.go index 7f6ab82..7c19618 100644 --- a/internal/middleware/error_handler.go +++ b/internal/middleware/error_handler.go @@ -36,19 +36,3 @@ func ErrorHandler() gin.HandlerFunc { 
} } } - -// CORS middleware configuration -func CORSConfig() gin.HandlerFunc { - return gin.HandlerFunc(func(c *gin.Context) { - c.Header("Access-Control-Allow-Origin", "*") - c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS, PATCH") - c.Header("Access-Control-Allow-Headers", "Origin, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization") - - if c.Request.Method == "OPTIONS" { - c.AbortWithStatus(204) - return - } - - c.Next() - }) -} diff --git a/internal/middleware/keycloak_middleware.go b/internal/middleware/keycloak_middleware.go deleted file mode 100644 index a336154..0000000 --- a/internal/middleware/keycloak_middleware.go +++ /dev/null @@ -1,254 +0,0 @@ -package middleware - -/** Keycloak Auth Middleware **/ -import ( - "crypto/rsa" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "math/big" - "net/http" - "strings" - "sync" - "time" - - "api-service/internal/config" - - "github.com/gin-gonic/gin" - "github.com/golang-jwt/jwt/v5" - "golang.org/x/sync/singleflight" -) - -var ( - ErrInvalidToken = errors.New("invalid token") -) - -// JwksCache caches JWKS keys with expiration -type JwksCache struct { - mu sync.RWMutex - keys map[string]*rsa.PublicKey - expiresAt time.Time - sfGroup singleflight.Group - config *config.Config -} - -func NewJwksCache(cfg *config.Config) *JwksCache { - return &JwksCache{ - keys: make(map[string]*rsa.PublicKey), - config: cfg, - } -} - -func (c *JwksCache) GetKey(kid string) (*rsa.PublicKey, error) { - c.mu.RLock() - if key, ok := c.keys[kid]; ok && time.Now().Before(c.expiresAt) { - c.mu.RUnlock() - return key, nil - } - c.mu.RUnlock() - - // Fetch keys with singleflight to avoid concurrent fetches - v, err, _ := c.sfGroup.Do("fetch_jwks", func() (interface{}, error) { - return c.fetchKeys() - }) - if err != nil { - return nil, err - } - - keys := v.(map[string]*rsa.PublicKey) - - c.mu.Lock() - c.keys = keys - c.expiresAt = time.Now().Add(1 * time.Hour) // cache for 1 hour - 
c.mu.Unlock() - - key, ok := keys[kid] - if !ok { - return nil, fmt.Errorf("key with kid %s not found", kid) - } - return key, nil -} - -func (c *JwksCache) fetchKeys() (map[string]*rsa.PublicKey, error) { - if !c.config.Keycloak.Enabled { - return nil, fmt.Errorf("keycloak authentication is disabled") - } - - jwksURL := c.config.Keycloak.JwksURL - if jwksURL == "" { - // Construct JWKS URL from issuer if not explicitly provided - jwksURL = c.config.Keycloak.Issuer + "/protocol/openid-connect/certs" - } - - resp, err := http.Get(jwksURL) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - var jwksData struct { - Keys []struct { - Kid string `json:"kid"` - Kty string `json:"kty"` - N string `json:"n"` - E string `json:"e"` - } `json:"keys"` - } - - if err := json.NewDecoder(resp.Body).Decode(&jwksData); err != nil { - return nil, err - } - - keys := make(map[string]*rsa.PublicKey) - for _, key := range jwksData.Keys { - if key.Kty != "RSA" { - continue - } - pubKey, err := parseRSAPublicKey(key.N, key.E) - if err != nil { - continue - } - keys[key.Kid] = pubKey - } - return keys, nil -} - -// parseRSAPublicKey parses RSA public key components from base64url strings -func parseRSAPublicKey(nStr, eStr string) (*rsa.PublicKey, error) { - nBytes, err := base64UrlDecode(nStr) - if err != nil { - return nil, err - } - eBytes, err := base64UrlDecode(eStr) - if err != nil { - return nil, err - } - - var eInt int - for _, b := range eBytes { - eInt = eInt<<8 + int(b) - } - - pubKey := &rsa.PublicKey{ - N: new(big.Int).SetBytes(nBytes), - E: eInt, - } - return pubKey, nil -} - -func base64UrlDecode(s string) ([]byte, error) { - // Add padding if missing - if m := len(s) % 4; m != 0 { - s += strings.Repeat("=", 4-m) - } - return base64.URLEncoding.DecodeString(s) -} - -// Global config instance -var appConfig *config.Config -var jwksCacheInstance *JwksCache - -// InitializeAuth initializes the auth middleware with config -func InitializeAuth(cfg 
*config.Config) { - appConfig = cfg - jwksCacheInstance = NewJwksCache(cfg) -} - -// AuthMiddleware validates Bearer token as Keycloak JWT token -func AuthMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - if appConfig == nil { - fmt.Println("AuthMiddleware: Config not initialized") - c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) - return - } - - if !appConfig.Keycloak.Enabled { - // Skip authentication if Keycloak is disabled but log for debugging - fmt.Println("AuthMiddleware: Keycloak authentication is disabled - allowing all requests") - c.Next() - return - } - - fmt.Println("AuthMiddleware: Checking Authorization header") // Debug log - - authHeader := c.GetHeader("Authorization") - if authHeader == "" { - fmt.Println("AuthMiddleware: Authorization header missing") // Debug log - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) - return - } - - parts := strings.SplitN(authHeader, " ", 2) - if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { - fmt.Println("AuthMiddleware: Invalid Authorization header format") // Debug log - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) - return - } - - tokenString := parts[1] - - token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { - // Verify signing method - if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { - fmt.Printf("AuthMiddleware: Unexpected signing method: %v\n", token.Header["alg"]) // Debug log - return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) - } - - kid, ok := token.Header["kid"].(string) - if !ok { - fmt.Println("AuthMiddleware: kid header not found") // Debug log - return nil, errors.New("kid header not found") - } - - return jwksCacheInstance.GetKey(kid) - }, jwt.WithIssuer(appConfig.Keycloak.Issuer), 
jwt.WithAudience(appConfig.Keycloak.Audience)) - - if err != nil || !token.Valid { - fmt.Printf("AuthMiddleware: Invalid or expired token: %v\n", err) // Debug log - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid or expired token"}) - return - } - - fmt.Println("AuthMiddleware: Token valid, proceeding") // Debug log - // Token is valid, proceed - c.Next() - } -} - -/** JWT Bearer authentication middleware */ -// import ( -// "net/http" -// "strings" - -// "github.com/gin-gonic/gin" -// ) - -// AuthMiddleware validates Bearer token in Authorization header -func AuthJWTMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - authHeader := c.GetHeader("Authorization") - if authHeader == "" { - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header missing"}) - return - } - - parts := strings.SplitN(authHeader, " ", 2) - if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header format must be Bearer {token}"}) - return - } - - token := parts[1] - // For now, use a static token for validation. Replace with your logic. 
- const validToken = "your-static-token" - - if token != validToken { - c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"}) - return - } - - c.Next() - } -} diff --git a/internal/middleware/providers.go b/internal/middleware/providers.go new file mode 100644 index 0000000..6a32631 --- /dev/null +++ b/internal/middleware/providers.go @@ -0,0 +1,615 @@ +package middleware + +import ( + "api-service/internal/config" + "api-service/internal/models/auth" + models "api-service/internal/models/auth" + service "api-service/internal/services/auth" + "api-service/pkg/logger" + "crypto/rsa" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "math/big" + "net/http" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/golang-jwt/jwt/v5" + "golang.org/x/sync/singleflight" +) + +// AuthProvider interface for different authentication methods +type AuthProvider interface { + ValidateToken(tokenString string) (*models.JWTClaims, error) + Name() string +} + +// ProviderFactory creates authentication providers based on configuration +type ProviderFactory struct { + authService *service.AuthService + config *config.Config +} + +func NewProviderFactory(authService *service.AuthService, config *config.Config) *ProviderFactory { + return &ProviderFactory{ + authService: authService, + config: config, + } +} + +func (f *ProviderFactory) CreateProviders() []AuthProvider { + var providers []AuthProvider + + reqLogger := logger.Default().WithService("provider-factory") + reqLogger.Info("Creating authentication providers", map[string]interface{}{ + "auth_type": f.config.Auth.Type, + "keycloak_enabled": f.config.Keycloak.Enabled, + "keycloak_issuer": f.config.Keycloak.Issuer, + "static_tokens_len": len(f.config.Auth.StaticTokens), + "fallback_to": f.config.Auth.FallbackTo, + }) + + switch f.config.Auth.Type { + case "static": + reqLogger.Info("Configuring static token provider") + if len(f.config.Auth.StaticTokens) > 0 { + providers = 
append(providers, NewStaticTokenProvider(f.config.Auth.StaticTokens)) + reqLogger.Info("Static token provider added", map[string]interface{}{ + "token_count": len(f.config.Auth.StaticTokens), + }) + } else { + reqLogger.Warn("No static tokens configured for static auth type") + } + case "jwt": + reqLogger.Info("Configuring JWT provider") + providers = append(providers, NewJWTAuthProvider(f.authService)) + reqLogger.Info("JWT provider added") + case "keycloak": + reqLogger.Info("Configuring Keycloak provider") + if f.config.Keycloak.Issuer != "" { + providers = append(providers, NewKeycloakAuthProvider(f.config)) + reqLogger.Info("Keycloak provider added") + } else { + reqLogger.Warn("Keycloak issuer not configured for keycloak auth type") + } + case "hybrid": + reqLogger.Info("Configuring hybrid providers") + if f.config.Keycloak.Issuer != "" { + providers = append(providers, NewKeycloakAuthProvider(f.config)) + reqLogger.Info("Keycloak provider added for hybrid") + } else { + reqLogger.Warn("Keycloak issuer not configured for hybrid auth type") + } + switch f.config.Auth.FallbackTo { + case "static": + reqLogger.Info("Configuring static fallback for hybrid") + if len(f.config.Auth.StaticTokens) > 0 { + providers = append(providers, NewStaticTokenProvider(f.config.Auth.StaticTokens)) + reqLogger.Info("Static fallback provider added", map[string]interface{}{ + "token_count": len(f.config.Auth.StaticTokens), + }) + } else { + reqLogger.Warn("No static tokens configured for hybrid fallback") + } + case "jwt": + reqLogger.Info("Configuring JWT fallback for hybrid") + providers = append(providers, NewJWTAuthProvider(f.authService)) + reqLogger.Info("JWT fallback provider added") + case "keycloak": + reqLogger.Info("Configuring Keycloak fallback for hybrid") + if f.config.Keycloak.Issuer != "" { + providers = append(providers, NewKeycloakAuthProvider(f.config)) + reqLogger.Info("Keycloak fallback provider added") + } else { + reqLogger.Warn("Keycloak issuer not 
configured for hybrid fallback") + } + default: + reqLogger.Warn("Unknown fallback type for hybrid, using JWT", map[string]interface{}{ + "fallback_to": f.config.Auth.FallbackTo, + }) + providers = append(providers, NewJWTAuthProvider(f.authService)) + reqLogger.Info("JWT fallback provider added as default") + } + default: + reqLogger.Warn("Unknown auth type, defaulting to JWT", map[string]interface{}{ + "auth_type": f.config.Auth.Type, + }) + providers = append(providers, NewJWTAuthProvider(f.authService)) + reqLogger.Info("JWT provider added as default") + } + + reqLogger.Info("Provider creation completed", map[string]interface{}{ + "provider_count": len(providers), + }) + + return providers +} + +// StaticTokenProvider handles static token authentication +type StaticTokenProvider struct { + tokens map[string]bool +} + +func NewStaticTokenProvider(tokens []string) *StaticTokenProvider { + tokenMap := make(map[string]bool) + for _, token := range tokens { + if token != "" { + tokenMap[token] = true + } + } + return &StaticTokenProvider{tokens: tokenMap} +} + +func (s *StaticTokenProvider) ValidateToken(tokenString string) (*models.JWTClaims, error) { + reqLogger := logger.Default().WithService("static-auth") + + if !s.tokens[tokenString] { + reqLogger.Warn("Invalid static token provided") + return nil, ErrInvalidToken + } + + reqLogger.Info("Static token validation successful") + return &models.JWTClaims{ + UserID: "static-user", + Username: "static-user", + Email: "static@example.com", + Role: "user", + }, nil +} + +func (s *StaticTokenProvider) Name() string { + return "static" +} + +// JWTAuthProvider handles JWT authentication using AuthService +type JWTAuthProvider struct { + authService *service.AuthService +} + +func NewJWTAuthProvider(authService *service.AuthService) *JWTAuthProvider { + return &JWTAuthProvider{authService: authService} +} + +func (j *JWTAuthProvider) ValidateToken(tokenString string) (*models.JWTClaims, error) { + reqLogger := 
logger.Default().WithService("jwt-auth") + reqLogger.Info("Starting JWT token validation") + + claims, err := j.authService.ValidateToken(tokenString) + if err != nil { + reqLogger.Error("JWT validation failed", map[string]interface{}{ + "error": err.Error(), + }) + return nil, err + } + + reqLogger.Info("JWT validation successful", map[string]interface{}{ + "user_id": claims.UserID, + }) + return claims, nil +} + +func (j *JWTAuthProvider) Name() string { + return "jwt" +} + +// KeycloakAuthProvider handles Keycloak JWT authentication +type KeycloakAuthProvider struct { + jwksCache *JwksCache + config *config.Config +} + +func NewKeycloakAuthProvider(cfg *config.Config) *KeycloakAuthProvider { + return &KeycloakAuthProvider{ + jwksCache: NewJwksCache(cfg), + config: cfg, + } +} + +func (k *KeycloakAuthProvider) ValidateToken(tokenString string) (*auth.JWTClaims, error) { + reqLogger := logger.Default().WithService("keycloak-auth") + reqLogger.Info("Starting Keycloak token validation") + + // Parse token without verification first to get claims for logging + parsedToken, _, err := jwt.NewParser().ParseUnverified(tokenString, jwt.MapClaims{}) + if err != nil { + reqLogger.Error("Failed to parse token", map[string]interface{}{ + "error": err.Error(), + }) + return nil, ErrInvalidToken + } + + // Extract claims for logging + claims, ok := parsedToken.Claims.(jwt.MapClaims) + if !ok { + reqLogger.Error("Invalid claims format") + return nil, ErrMissingClaims + } + + // Check if token is expired + if exp, ok := claims["exp"].(float64); ok { + if time.Now().Unix() > int64(exp) { + reqLogger.Warn("Token expired", map[string]interface{}{ + "exp": exp, + "now": time.Now().Unix(), + }) + return nil, ErrTokenExpired + } + } + + // Now parse with verification + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { + // Verify signing method + if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { + reqLogger.Warn("Unexpected signing method", 
map[string]interface{}{ + "alg": token.Header["alg"], + }) + return nil, ErrInvalidSignature + } + + kid, ok := token.Header["kid"].(string) + if !ok { + reqLogger.Warn("kid header not found in token") + return nil, errors.New("kid header not found") + } + + reqLogger.Info("Looking for key", map[string]interface{}{ + "kid": kid, + }) + key, err := k.jwksCache.GetKey(kid) + if err != nil { + reqLogger.Error("Failed to get key", map[string]interface{}{ + "kid": kid, + "error": err.Error(), + }) + return nil, err + } + reqLogger.Info("Key retrieved successfully", map[string]interface{}{ + "kid": kid, + }) + return key, nil + }, jwt.WithIssuer(k.config.Keycloak.Issuer), jwt.WithAudience(k.config.Keycloak.Audience)) + + if err != nil { + reqLogger.Error("JWT parse error", map[string]interface{}{ + "error": err.Error(), + }) + + // Return specific error based on the error type + if strings.Contains(err.Error(), "expired") { + return nil, ErrTokenExpired + } else if strings.Contains(err.Error(), "signature") { + return nil, ErrInvalidSignature + } else if strings.Contains(err.Error(), "issuer") { + return nil, ErrInvalidIssuer + } else if strings.Contains(err.Error(), "audience") { + return nil, ErrInvalidAudience + } + + return nil, fmt.Errorf("invalid token: %v", err) + } + + if !token.Valid { + reqLogger.Warn("Token is not valid") + return nil, ErrInvalidToken + } + + reqLogger.Info("Token validation successful") + + // Extract claims + claims, ok = token.Claims.(jwt.MapClaims) + if !ok { + reqLogger.Error("Invalid claims format") + return nil, ErrMissingClaims + } + + // Validate required claims + userID := getClaimString(claims, "sub") + if userID == "" { + reqLogger.Error("Missing required claim: sub") + return nil, ErrMissingClaims + } + + return &auth.JWTClaims{ + UserID: userID, + Username: getClaimString(claims, "preferred_username"), + Email: getClaimString(claims, "email"), + Role: getClaimString(claims, "role"), + }, nil +} + +func (k *KeycloakAuthProvider) 
Name() string { + return "keycloak" +} + +// UnifiedAuthMiddleware provides flexible authentication based on configuration +func UnifiedAuthMiddleware(cfg *config.Config, authService *service.AuthService) gin.HandlerFunc { + factory := NewProviderFactory(authService, cfg) + providers := factory.CreateProviders() + + // Validate that we have at least one provider + if len(providers) == 0 { + logger.Default().Error("No authentication providers configured", map[string]interface{}{ + "auth_type": cfg.Auth.Type, + }) + return func(c *gin.Context) { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "authentication service not configured"}) + } + } + + logger.Default().Info("UnifiedAuthMiddleware initialized", map[string]interface{}{ + "provider_count": len(providers), + "auth_type": cfg.Auth.Type, + }) + + return func(c *gin.Context) { + reqLogger := logger.Default().WithService("unified-auth") + reqLogger.Info("Memulai proses autentikasi", map[string]interface{}{ + "auth_type": cfg.Auth.Type, + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + reqLogger.Warn("Header Authorization tidak ditemukan", map[string]interface{}{ + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": ErrMissingAuthHeader.Error()}) + return + } + + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + reqLogger.Warn("Format header Authorization tidak valid", map[string]interface{}{ + "header_value": authHeader[:min(20, len(authHeader))], // Log first 20 chars for debugging + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": ErrInvalidAuthHeader.Error()}) + return + } + + tokenString := parts[1] + reqLogger.Info("Token diterima", map[string]interface{}{ + "token_length": len(tokenString), 
+ "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + + // Coba setiap provider sampai salah satu berhasil + var claims *auth.JWTClaims + var err error + var providerName string + var providerErrors []string + var triedProviders []string + + reqLogger.Info("Starting provider validation loop", map[string]interface{}{ + "provider_count": len(providers), + }) + + for _, provider := range providers { + providerLog := reqLogger.WithField("provider", provider.Name()) + triedProviders = append(triedProviders, provider.Name()) + providerLog.Info("Mencoba validasi dengan provider", map[string]interface{}{ + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + + claims, err = provider.ValidateToken(tokenString) + if err == nil { + providerName = provider.Name() + providerLog.Info("Autentikasi berhasil", map[string]interface{}{ + "user_id": claims.UserID, + "username": claims.Username, + "role": claims.Role, + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + break // Berhenti jika ada yang berhasil + } + + providerLog.Warn("Validasi provider gagal", map[string]interface{}{ + "error": err.Error(), + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + providerErrors = append(providerErrors, fmt.Sprintf("provider %s: %v", provider.Name(), err)) + } + + if err != nil { + reqLogger.Error("Semua provider gagal memvalidasi token", map[string]interface{}{ + "errors": strings.Join(providerErrors, "; "), + "tried_providers": strings.Join(triedProviders, ", "), + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + + // Return specific error message based on the error type + errorMessage := "Token tidak valid" + if errors.Is(err, ErrTokenExpired) { + errorMessage = "Token telah kadaluarsa" + } else if errors.Is(err, ErrInvalidSignature) { + errorMessage = "Signature token tidak valid" + } else if errors.Is(err, ErrInvalidIssuer) { + errorMessage = "Issuer token tidak valid" + } else if errors.Is(err, ErrInvalidAudience) { 
+ errorMessage = "Audience token tidak valid" + } + + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{ + "error": errorMessage, + "details": strings.Join(providerErrors, "; "), + }) + return + } + + // Set informasi pengguna di konteks + if claims != nil { + c.Set("user_id", claims.UserID) + c.Set("username", claims.Username) + c.Set("email", claims.Email) + c.Set("role", claims.Role) + c.Set("auth_provider", providerName) + + reqLogger.Info("User context set successfully", map[string]interface{}{ + "user_id": claims.UserID, + "username": claims.Username, + "role": claims.Role, + "auth_provider": providerName, + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + } else { + reqLogger.Warn("Claims is nil after successful authentication", map[string]interface{}{ + "provider": providerName, + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + } + + reqLogger.Info("Authentication completed successfully, proceeding to next handler", map[string]interface{}{ + "path": c.Request.URL.Path, + "method": c.Request.Method, + }) + + c.Next() + } +} + +// InitializeAuth initializes authentication configuration +func InitializeAuth(cfg *config.Config) { + // This function can be used to initialize global auth settings if needed + logger.Default().Info("Authentication initialized", map[string]interface{}{ + "auth_type": cfg.Auth.Type, + }) +} + +// Helper functions +func getClaimString(claims jwt.MapClaims, key string) string { + if value, ok := claims[key]; ok && value != nil { + if str, ok := value.(string); ok { + return str + } + } + return "" +} + +// JwksCache and related functions +type JwksCache struct { + mu sync.RWMutex + keys map[string]*rsa.PublicKey + expiresAt time.Time + sfGroup singleflight.Group + config *config.Config +} + +func NewJwksCache(cfg *config.Config) *JwksCache { + return &JwksCache{ + keys: make(map[string]*rsa.PublicKey), + config: cfg, + } +} + +func (c *JwksCache) GetKey(kid string) (*rsa.PublicKey, error) { + 
c.mu.RLock() + if key, ok := c.keys[kid]; ok && time.Now().Before(c.expiresAt) { + c.mu.RUnlock() + return key, nil + } + c.mu.RUnlock() + + // Fetch keys with singleflight to avoid concurrent fetches + v, err, _ := c.sfGroup.Do("fetch_jwks", func() (interface{}, error) { + return c.fetchKeys() + }) + if err != nil { + return nil, err + } + + keys := v.(map[string]*rsa.PublicKey) + + c.mu.Lock() + c.keys = keys + c.expiresAt = time.Now().Add(1 * time.Hour) // cache for 1 hour + c.mu.Unlock() + + key, ok := keys[kid] + if !ok { + return nil, fmt.Errorf("key with kid %s not found", kid) + } + return key, nil +} + +func (c *JwksCache) fetchKeys() (map[string]*rsa.PublicKey, error) { + if c.config.Keycloak.Issuer == "" { + return nil, fmt.Errorf("keycloak issuer is not configured") + } + + jwksURL := c.config.Keycloak.JwksURL + if jwksURL == "" { + // Construct JWKS URL from issuer if not explicitly provided + jwksURL = c.config.Keycloak.Issuer + "/protocol/openid-connect/certs" + } + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Get(jwksURL) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to fetch JWKS: HTTP %d", resp.StatusCode) + } + + var jwksData struct { + Keys []struct { + Kid string `json:"kid"` + Kty string `json:"kty"` + N string `json:"n"` + E string `json:"e"` + } `json:"keys"` + } + + if err := json.NewDecoder(resp.Body).Decode(&jwksData); err != nil { + return nil, err + } + + keys := make(map[string]*rsa.PublicKey) + for _, key := range jwksData.Keys { + if key.Kty != "RSA" { + continue + } + pubKey, err := parseRSAPublicKey(key.N, key.E) + if err != nil { + continue + } + keys[key.Kid] = pubKey + } + return keys, nil +} + +// parseRSAPublicKey parses RSA public key components from base64url strings +func parseRSAPublicKey(nStr, eStr string) (*rsa.PublicKey, error) { + nBytes, err := base64.RawURLEncoding.DecodeString(nStr) + if err != 
nil { + return nil, err + } + eBytes, err := base64.RawURLEncoding.DecodeString(eStr) + if err != nil { + return nil, err + } + + n := new(big.Int).SetBytes(nBytes) + e := int(new(big.Int).SetBytes(eBytes).Int64()) + + return &rsa.PublicKey{ + N: n, + E: e, + }, nil +} diff --git a/internal/middleware/security.go b/internal/middleware/security.go new file mode 100644 index 0000000..d776b51 --- /dev/null +++ b/internal/middleware/security.go @@ -0,0 +1,316 @@ +// middleware/security.go +package middleware + +import ( + "api-service/internal/config" + "context" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "strings" + "time" + + "github.com/gin-contrib/cors" + "github.com/gin-gonic/gin" + "github.com/go-redis/redis_rate/v10" // Tambahkan library ini: go get github.com/go-redis/redis_rate/v10 + "github.com/redis/go-redis/v9" +) + +// Config menyimpan konfigurasi untuk middleware keamanan +type Config struct { + // CORS + TrustedOrigins []string + + // Rate Limiting + RedisClient *redis.Client + RequestsPerMin int + + // Input Validation + MaxInputLength int +} + +// SwaggerSecurityHeaders adalah middleware khusus untuk route dokumentasi. +// CSP-nya dilonggarkan untuk mengizinkan skrip dan gaya inline yang dibutuhkan Swagger UI. 
+func SwaggerSecurityHeaders() gin.HandlerFunc { + return func(c *gin.Context) { + // Header lainnya tetap bisa diterapkan + c.Header("X-Frame-Options", "DENY") + c.Header("X-Content-Type-Options", "nosniff") + c.Header("Referrer-Policy", "strict-origin-when-cross-origin") + c.Header("Permissions-Policy", "geolocation=(), microphone=(), camera=(), payment=(), usb=()") + + // CSP yang lebih longgar untuk Swagger UI + // 'unsafe-inline' dibutuhkan untuk skrip dan gaya yang ada di dalam HTML + // data: dibutuhkan jika ada gambar atau resource yang di-encode base64 + cspHeader := "default-src 'self'; " + + "script-src 'self' 'unsafe-inline'; " + // <--- PERUBAHAN UTAMA + "style-src 'self' 'unsafe-inline'; " + // <--- Juga sering dibutuhkan + "img-src 'self' data:; " + // <--- Untuk gambar base64 + "object-src 'none'; " + + "base-uri 'self'; " + + "frame-ancestors 'none';" + c.Header("Content-Security-Policy", cspHeader) + + // HSTS juga bisa diterapkan jika menggunakan HTTPS + if c.Request.TLS != nil { + c.Header("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload") + } + + c.Next() + } +} + +// SecurityHeaders menambahkan header keamanan standar ke semua respons +func SecurityHeaders() gin.HandlerFunc { + return func(c *gin.Context) { + // Mencegah clickjacking + c.Header("X-Frame-Options", "DENY") + // Mencegah MIME type sniffing + c.Header("X-Content-Type-Options", "nosniff") + // Mengaktifkan proteksi XSS (sudah usang di browser modern tapi tetap baik) + c.Header("X-XSS-Protection", "1; mode=block") + // Kebijakan referrer + c.Header("Referrer-Policy", "strict-origin-when-cross-origin") + // Kebijakan Keamanan Konten (CSP) - Lebih ketat + // Hindari 'unsafe-inline' di produksi. Gunakan nonce atau hash jika memungkinkan. 
+ c.Header("Content-Security-Policy", "default-src 'self'; script-src 'self'; object-src 'none'; base-uri 'self'; frame-ancestors 'none';") + // Kebijakan Izin (Permissions Policy) - Menonaktifkan fitur browser yang tidak dibutuhkan + c.Header("Permissions-Policy", "geolocation=(), microphone=(), camera=(), payment=(), usb=()") + + // HSTS (HTTP Strict Transport Security) - Hanya untuk HTTPS + if c.Request.TLS != nil { + c.Header("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload") + } + + c.Next() + } +} + +// SecureCORSConfig menyediakan konfigurasi CORS yang aman dan fleksibel +func SecureCORSConfig(cfg config.SecurityConfig) gin.HandlerFunc { + return cors.New(cors.Config{ + AllowOrigins: cfg.TrustedOrigins, + AllowMethods: []string{"GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"}, + AllowHeaders: []string{"Origin", "Content-Length", "Content-Type", "Authorization"}, + ExposeHeaders: []string{"Content-Length"}, + AllowCredentials: true, // Hanya gunakan 'true' jika Anda benar-benar membutuhkannya (cookie, auth) + MaxAge: 12 * time.Hour, + }) +} + +// RateLimitByIPRedis membatasi permintaan per IP menggunakan Redis untuk skalabilitas +func RateLimitByIPRedis(cfg config.SecurityConfig) gin.HandlerFunc { + // Buat koneksi Redis dari konfigurasi + rdb := redis.NewClient(&redis.Options{ + Addr: fmt.Sprintf("%s:%d", cfg.RateLimit.Redis.Host, cfg.RateLimit.Redis.Port), + Password: cfg.RateLimit.Redis.Password, + DB: cfg.RateLimit.Redis.DB, + }) + + // Cek koneksi ke Redis + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if _, err := rdb.Ping(ctx).Result(); err != nil { + // Jika gagal konek, gunakan fallback di memori dan log error + fmt.Printf("WARNING: Could not connect to Redis: %v. 
Falling back to in-memory rate limiter.\n", err) + return rateLimitByIPFallback(cfg.RateLimit.RequestsPerMinute) + } + + limiter := redis_rate.NewLimiter(rdb) + return func(c *gin.Context) { + res, err := limiter.Allow(c.Request.Context(), c.ClientIP(), redis_rate.PerMinute(cfg.RateLimit.RequestsPerMinute)) + if err != nil { + fmt.Printf("Rate limiter error: %v\n", err) + c.Next() + return + } + + h := c.Writer.Header() + h.Set("X-RateLimit-Limit", fmt.Sprintf("%d", cfg.RateLimit.RequestsPerMinute)) + h.Set("X-RateLimit-Remaining", fmt.Sprintf("%d", res.Remaining)) + + if res.Allowed == 0 { + h.Set("X-RateLimit-Reset", fmt.Sprintf("%d", time.Now().Add(res.RetryAfter).Unix())) + c.AbortWithStatusJSON(http.StatusTooManyRequests, gin.H{ + "error": "Rate limit exceeded", + }) + return + } + + c.Next() + } +} + +// rateLimitByIPFallback adalah rate limiter sederhana di memori, HANYA untuk pengembangan +func rateLimitByIPFallback(requestsPerMinute int) gin.HandlerFunc { + type client struct { + count int + resetTime int64 + } + clients := make(map[string]*client) + + return func(c *gin.Context) { + ip := c.ClientIP() + now := time.Now().Unix() + + if _, exists := clients[ip]; !exists { + clients[ip] = &client{count: 0, resetTime: now + 60} + } + + cl := clients[ip] + if now > cl.resetTime { + cl.count = 0 + cl.resetTime = now + 60 + } + + if cl.count >= requestsPerMinute { + c.AbortWithStatusJSON(http.StatusTooManyRequests, gin.H{"error": "Rate limit exceeded"}) + return + } + + cl.count++ + c.Next() + } +} + +// InputValidation memvalidasi input untuk mencegah serangan injeksi dan buffer overflow +func InputValidation(cfg config.SecurityConfig) gin.HandlerFunc { + // Pola-pola yang mencurigakan. Ini adalah lapisan pertahanan tambahan (WAF), bukan pengganti prepared statements. 
+ suspiciousPatterns := []string{ + "union select", "union all select", "select.*from", "insert.*into", "update.*set", "delete.*from", + "drop table", "drop database", "alter table", "create table", "exec(", "execute(", "xp_", "sp_", + "information_schema", "sysobjects", "syscolumns", "mysql.", "pg_", "sqlite_", ";--", "/*", "*/", + "@@", "script>", " maxLength { + log.Printf("ERROR: Parameter '%s' with value '%s' (length: %d) exceeds max length %d", key, value, len(value), maxLength) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{ + "error": "Input too long", + "message": fmt.Sprintf("Query parameter '%s' exceeds maximum length", key), + }) + return false + } + } + } + + // Periksa form data (jika sudah di-parse) + if c.Request.PostForm != nil { + for key, values := range c.Request.PostForm { + for _, value := range values { + if len(value) > maxLength { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{ + "error": "Input too long", + "message": fmt.Sprintf("Form parameter '%s' exceeds maximum length", key), + }) + return false + } + } + } + } + return true +} + +// hasInjectionPatterns memeriksa pola injeksi pada query, form, dan body JSON +func hasInjectionPatterns(c *gin.Context, patterns []string) bool { + // Periksa query string + query := strings.ToLower(c.Request.URL.RawQuery) + for _, pattern := range patterns { + if strings.Contains(query, pattern) { + return true + } + } + + // Periksa form data + if err := c.Request.ParseForm(); err == nil { + for _, values := range c.Request.Form { + for _, value := range values { + lowerValue := strings.ToLower(value) + for _, pattern := range patterns { + if strings.Contains(lowerValue, pattern) { + return true + } + } + } + } + } + + // Periksa body JSON + if c.ContentType() == "application/json" { + bodyBytes, err := io.ReadAll(c.Request.Body) + if err != nil { + return false + } + // **PENTING**: Kembalikan body agar bisa dibaca lagi oleh handler (misalnya c.ShouldBindJSON) + c.Request.Body = 
io.NopCloser(strings.NewReader(string(bodyBytes))) + + var jsonData map[string]interface{} + if err := json.Unmarshal(bodyBytes, &jsonData); err == nil { + if checkMapForPatterns(jsonData, patterns) { + return true + } + } + } + + return false +} + +// checkMapForPatterns memeriksa nilai-nilai di dalam map JSON secara rekursif +func checkMapForPatterns(data map[string]interface{}, patterns []string) bool { + for _, value := range data { + if checkValueForPatterns(value, patterns) { + return true + } + } + return false +} + +func checkValueForPatterns(value interface{}, patterns []string) bool { + switch v := value.(type) { + case string: + lowerValue := strings.ToLower(v) + for _, pattern := range patterns { + if strings.Contains(lowerValue, pattern) { + return true + } + } + case map[string]interface{}: + return checkMapForPatterns(v, patterns) + case []interface{}: + for _, item := range v { + if checkValueForPatterns(item, patterns) { + return true + } + } + } + return false +} diff --git a/internal/models/auth/auth.go b/internal/models/auth/auth.go index 872b45a..746ea78 100644 --- a/internal/models/auth/auth.go +++ b/internal/models/auth/auth.go @@ -1,4 +1,8 @@ -package models +package auth + +import ( + "github.com/golang-jwt/jwt/v5" +) // LoginRequest represents the login request payload type LoginRequest struct { @@ -8,17 +12,32 @@ type LoginRequest struct { // TokenResponse represents the token response type TokenResponse struct { - AccessToken string `json:"access_token"` - TokenType string `json:"token_type"` - ExpiresIn int64 `json:"expires_in"` + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + TokenType string `json:"token_type"` // Biasanya "Bearer" + ExpiresIn int64 `json:"expires_in"` // Durasi dalam detik } // JWTClaims represents the JWT claims type JWTClaims struct { - UserID string `json:"user_id"` - Username string `json:"username"` - Email string `json:"email"` - Role string `json:"role"` + UserID string 
`json:"sub"` // Gunakan "sub" (subject) sebagai standar untuk ID pengguna + Username string `json:"username"` + Email string `json:"email"` + Role string `json:"role"` + jwt.RegisteredClaims // Menanamkan klaim standar (exp, iat, iss, aud, dll.) +} + +// RegisterRequest represents the register request payload +type RegisterRequest struct { + Username string `json:"username" binding:"required,min=3,max=50"` + Email string `json:"email" binding:"required,email"` + Password string `json:"password" binding:"required,min=6"` + Role string `json:"role" binding:"required,oneof=admin user"` // Contoh validasi role +} + +// RefreshTokenRequest represents the refresh token request payload +type RefreshTokenRequest struct { + RefreshToken string `json:"refresh_token" binding:"required"` } // User represents a user for authentication @@ -26,6 +45,14 @@ type User struct { ID string `json:"id"` Username string `json:"username"` Email string `json:"email"` - Password string `json:"-"` + Password string `json:"-"` // Tidak disertakan saat di-serialize ke JSON + Role string `json:"role"` +} + +// UserResponse represents user data that can be safely returned to the client +type UserResponse struct { + ID string `json:"id"` + Username string `json:"username"` + Email string `json:"email"` Role string `json:"role"` } diff --git a/internal/routes/v1/routes.go b/internal/routes/v1/routes.go index 87c5569..9cc5abe 100644 --- a/internal/routes/v1/routes.go +++ b/internal/routes/v1/routes.go @@ -9,40 +9,67 @@ import ( "api-service/internal/middleware" services "api-service/internal/services/auth" "api-service/pkg/logger" + "net/http" + "strings" "time" "github.com/gin-gonic/gin" + "github.com/golang-jwt/jwt/v5" swaggerFiles "github.com/swaggo/files" ginSwagger "github.com/swaggo/gin-swagger" ) func RegisterRoutes(cfg *config.Config) *gin.Engine { + // Atur mode Gin berdasarkan konfigurasi + gin.SetMode(cfg.Server.Mode) router := gin.New() - // Initialize auth middleware configuration + // 
============================================================================= + // GLOBAL MIDDLEWARE STACK (Middleware yang diperlukan SEMUA route) + // ============================================================================= middleware.InitializeAuth(cfg) - - // Add global middleware - router.Use(middleware.CORSConfig()) - router.Use(middleware.ErrorHandler()) - router.Use(logger.RequestLoggerMiddleware(logger.Default())) router.Use(gin.Recovery()) + // 1. CORS (Paling awal) + router.Use(middleware.SecureCORSConfig(cfg.Security)) + // 2. Rate Limiting + router.Use(middleware.RateLimitByIPRedis(cfg.Security)) + // 3. Logging & Recovery + router.Use(logger.RequestLoggerMiddleware(logger.Default())) + // 4. Error Handling (Terakhir, untuk menangkap error dari middleware di atasnya) + router.Use(middleware.ErrorHandler()) + + // ============================================================================= + // INISIALISASI SERVIS & HANDLER + // ============================================================================= - // Initialize services with error handling authService := services.NewAuthService(cfg) if authService == nil { logger.Fatal("Failed to initialize auth service") } - - // Initialize database service dbService := database.New(cfg) // ============================================================================= - // HEALTH CHECK & SYSTEM ROUTES + // SWAGGER DOCUMENTATION (Publik - TANPA SecurityHeaders) // ============================================================================= + // Route ini didefinisikan SEBELUM grup API agar tidak terkena middleware keamanan. 
+ router.GET("/swagger/*any", ginSwagger.WrapHandler( + swaggerFiles.Handler, + ginSwagger.DefaultModelsExpandDepth(-1), + ginSwagger.DeepLinking(true), + )) + // ============================================================================= + // API GROUPS (Dengan Keamanan Ketat) + // ============================================================================= + // Terapkan middleware keamanan dan validasi input HANYA ke grup API. + // Ini adalah perubahan utama. + apiGroup := router.Group("/api") + apiGroup.Use(middleware.SecurityHeaders()) // <--- PINDAHKAN KE SINI + apiGroup.Use(middleware.InputValidation(cfg.Security)) // <--- PINDAHKAN KE SINI + + // --- HEALTH CHECK & SYSTEM ROUTES --- healthCheckHandler := healthcheckHandlers.NewHealthCheckHandler(dbService) - sistem := router.Group("/api/sistem") + sistem := apiGroup.Group("/sistem") { sistem.GET("/health", healthCheckHandler.CheckHealth) sistem.GET("/databases", func(c *gin.Context) { @@ -62,89 +89,93 @@ func RegisterRoutes(cfg *config.Config) *gin.Engine { }) } - // ============================================================================= - // SWAGGER DOCUMENTATION - // ============================================================================= - - router.GET("/swagger/*any", ginSwagger.WrapHandler( - swaggerFiles.Handler, - ginSwagger.DefaultModelsExpandDepth(-1), - ginSwagger.DeepLinking(true), - )) - - // ============================================================================= - // API v1 GROUP - // ============================================================================= - - v1 := router.Group("/api/v1") - - // ============================================================================= - // PUBLIC ROUTES (No Authentication Required) - // ============================================================================= - - // Authentication routes - authHandler := authHandlers.NewAuthHandler(authService) - tokenHandler := authHandlers.NewTokenHandler(authService) - - // Basic auth routes 
- v1.POST("/auth/login", authHandler.Login) - v1.POST("/auth/register", authHandler.Register) - v1.POST("/auth/refresh", authHandler.RefreshToken) - - // Token generation routes - v1.POST("/token/generate", tokenHandler.GenerateToken) - v1.POST("/token/generate-direct", tokenHandler.GenerateTokenDirect) - - // ============================================================================= - // PUBLISHED ROUTES - - // Retribusi endpoints with - retribusiHandler := retribusiHandlers.NewRetribusiHandler() - retribusiGroup := v1.Group("/retribusi") + // --- API v1 GROUP --- + v1 := apiGroup.Group("/v1") { - retribusiGroup.GET("", retribusiHandler.GetRetribusi) - retribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) - retribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) - retribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) - retribusiGroup.POST("", func(c *gin.Context) { - retribusiHandler.CreateRetribusi(c) - }) + // ============================================================================= + // PUBLIC ROUTES (No Authentication Required) + // ============================================================================= + authHandler := authHandlers.NewAuthHandler(authService) + tokenHandler := authHandlers.NewTokenHandler(authService) - retribusiGroup.PUT("/id/:id", func(c *gin.Context) { - retribusiHandler.UpdateRetribusi(c) - }) + v1.POST("/auth/login", authHandler.Login) + v1.POST("/auth/register", authHandler.Register) + v1.POST("/auth/refresh", authHandler.RefreshToken) - retribusiGroup.DELETE("/id/:id", func(c *gin.Context) { - retribusiHandler.DeleteRetribusi(c) - }) + v1.POST("/token/generate", tokenHandler.GenerateToken) + v1.POST("/token/generate-direct", tokenHandler.GenerateTokenDirect) + + retribusiHandler := retribusiHandlers.NewRetribusiHandler() + retribusiGroup := v1.Group("/retribusi") + { + retribusiGroup.GET("", retribusiHandler.GetRetribusi) + retribusiGroup.GET("/dynamic", 
retribusiHandler.GetRetribusiDynamic) + retribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) + retribusiGroup.POST("", func(c *gin.Context) { + retribusiHandler.CreateRetribusi(c) + }) + retribusiGroup.PUT("/id/:id", func(c *gin.Context) { + retribusiHandler.UpdateRetribusi(c) + }) + retribusiGroup.DELETE("/id/:id", func(c *gin.Context) { + retribusiHandler.DeleteRetribusi(c) + }) + } + + // ============================================================================= + // PROTECTED ROUTES (Authentication Required) + // ============================================================================= + protected := v1.Group("/") + protected.Use(middleware.UnifiedAuthMiddleware(cfg, authService)) + + // farmasiObatHandler := farmasiObatHandlers.NewObatHandler() + // protectedFarmasiGroup := protected.Group("/farmasi/obat") + // { + // protectedFarmasiGroup.GET("", farmasiObatHandler.GetObat) + // protectedFarmasiGroup.GET("/kode/:kode", farmasiObatHandler.GetObatByID) + // } + + // protectedAuthGroup := protected.Group("/auth") + // { + // protectedAuthGroup.GET("/me", authHandler.Me) + // } } // ============================================================================= - // PROTECTED ROUTES (Authentication Required) + // DEBUG ROUTES (Publik - Tanpa keamanan ketat) // ============================================================================= + router.GET("/debug/token", func(c *gin.Context) { + authHeader := c.GetHeader("Authorization") + if authHeader == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Header Authorization hilang"}) + return + } - protected := v1.Group("/") - protected.Use(middleware.ConfigurableAuthMiddleware(cfg)) + parts := strings.SplitN(authHeader, " ", 2) + if len(parts) != 2 || strings.ToLower(parts[0]) != "bearer" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Format header harus Bearer {token}"}) + return + } - // Protected retribusi endpoints (Authentication Required) - // protectedRetribusiGroup := 
protected.Group("/retribusi") - // { - // protectedRetribusiGroup.GET("", retribusiHandler.GetRetribusi) - // protectedRetribusiGroup.GET("/dynamic", retribusiHandler.GetRetribusiDynamic) - // protectedRetribusiGroup.GET("/search", retribusiHandler.SearchRetribusiAdvanced) - // protectedRetribusiGroup.GET("/id/:id", retribusiHandler.GetRetribusiByID) - // protectedRetribusiGroup.POST("", func(c *gin.Context) { - // retribusiHandler.CreateRetribusi(c) - // }) + tokenString := parts[1] - // protectedRetribusiGroup.PUT("/id/:id", func(c *gin.Context) { - // retribusiHandler.UpdateRetribusi(c) - // }) + token, _, err := new(jwt.Parser).ParseUnverified(tokenString, jwt.MapClaims{}) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Gagal parsing token: " + err.Error()}) + return + } - // protectedRetribusiGroup.DELETE("/id/:id", func(c *gin.Context) { - // retribusiHandler.DeleteRetribusi(c) - // }) - // } + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + c.JSON(http.StatusBadRequest, gin.H{"error": "Format claim tidak valid"}) + return + } + + c.JSON(http.StatusOK, gin.H{ + "header": token.Header, + "claims": claims, + }) + }) return router } diff --git a/internal/services/auth/auth.go b/internal/services/auth/auth.go index d76aadb..96a1e1f 100644 --- a/internal/services/auth/auth.go +++ b/internal/services/auth/auth.go @@ -1,9 +1,11 @@ -package services +// services/auth/service.go +package auth import ( "api-service/internal/config" models "api-service/internal/models/auth" "errors" + "os" "time" "github.com/golang-jwt/jwt/v5" @@ -12,8 +14,9 @@ import ( // AuthService handles authentication logic type AuthService struct { - config *config.Config - users map[string]*models.User // In-memory user store for demo + config *config.Config + users map[string]*models.User // In-memory user store for demo + jwtSecret []byte } // NewAuthService creates a new authentication service @@ -38,9 +41,16 @@ func NewAuthService(cfg *config.Config) *AuthService { 
Role: "user", } + // Get JWT secret from environment or use default + jwtSecret := []byte(os.Getenv("JWT_SECRET")) + if len(jwtSecret) == 0 { + jwtSecret = []byte("your-secret-key-change-this-in-production") + } + return &AuthService{ - config: cfg, - users: users, + config: cfg, + users: users, + jwtSecret: jwtSecret, } } @@ -58,65 +68,148 @@ func (s *AuthService) Login(username, password string) (*models.TokenResponse, e } // Generate JWT token - token, err := s.generateToken(user) + token, expiresIn, err := s.generateToken(user) + if err != nil { + return nil, err + } + + // Generate refresh token + refreshToken, err := s.generateRefreshToken(user) if err != nil { return nil, err } return &models.TokenResponse{ - AccessToken: token, - TokenType: "Bearer", - ExpiresIn: 3600, // 1 hour + AccessToken: token, + RefreshToken: refreshToken, + TokenType: "Bearer", + ExpiresIn: expiresIn, }, nil } -// generateToken creates a new JWT token for the user -func (s *AuthService) generateToken(user *models.User) (string, error) { +// RefreshToken generates a new access token using a valid refresh token +func (s *AuthService) RefreshToken(refreshTokenString string) (*models.TokenResponse, error) { + // Parse and validate the refresh token + token, err := jwt.Parse(refreshTokenString, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, errors.New("unexpected signing method") + } + return s.jwtSecret, nil + }) + + if err != nil { + return nil, errors.New("invalid refresh token") + } + + if !token.Valid { + return nil, errors.New("invalid refresh token") + } + + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, errors.New("invalid token claims") + } + + // Check if it's a refresh token + tokenType, ok := claims["type"].(string) + if !ok || tokenType != "refresh" { + return nil, errors.New("not a refresh token") + } + + // Get user ID from claims + userID, ok := claims["user_id"].(string) + if !ok 
{ + return nil, errors.New("invalid user ID in token") + } + + // Find user + var user *models.User + for _, u := range s.users { + if u.ID == userID { + user = u + break + } + } + + if user == nil { + return nil, errors.New("user not found") + } + + // Generate new access token + accessToken, expiresIn, err := s.generateToken(user) + if err != nil { + return nil, err + } + + // Generate new refresh token + newRefreshToken, err := s.generateRefreshToken(user) + if err != nil { + return nil, err + } + + return &models.TokenResponse{ + AccessToken: accessToken, + RefreshToken: newRefreshToken, + TokenType: "Bearer", + ExpiresIn: expiresIn, + }, nil +} + +// generateToken creates a new JWT access token for the user +func (s *AuthService) generateToken(user *models.User) (string, int64, error) { // Create claims + now := time.Now() + expiresAt := now.Add(time.Hour * 1) // 1 hour expiration + claims := jwt.MapClaims{ "user_id": user.ID, "username": user.Username, "email": user.Email, "role": user.Role, - "exp": time.Now().Add(time.Hour * 1).Unix(), - "iat": time.Now().Unix(), + "type": "access", + "exp": expiresAt.Unix(), + "iat": now.Unix(), } // Create token token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) // Sign token with secret key - secretKey := []byte(s.getJWTSecret()) - return token.SignedString(secretKey) + tokenString, err := token.SignedString(s.jwtSecret) + if err != nil { + return "", 0, err + } + + return tokenString, int64(time.Hour.Seconds()), nil } -// GenerateTokenForUser generates a JWT token for a specific user -func (s *AuthService) GenerateTokenForUser(user *models.User) (string, error) { +// generateRefreshToken creates a new JWT refresh token for the user +func (s *AuthService) generateRefreshToken(user *models.User) (string, error) { // Create claims + now := time.Now() + expiresAt := now.Add(time.Hour * 24 * 7) // 7 days expiration + claims := jwt.MapClaims{ - "user_id": user.ID, - "username": user.Username, - "email": user.Email, - 
"role": user.Role, - "exp": time.Now().Add(time.Hour * 1).Unix(), - "iat": time.Now().Unix(), + "user_id": user.ID, + "type": "refresh", + "exp": expiresAt.Unix(), + "iat": now.Unix(), } // Create token token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) // Sign token with secret key - secretKey := []byte(s.getJWTSecret()) - return token.SignedString(secretKey) + return token.SignedString(s.jwtSecret) } -// ValidateToken validates the JWT token +// ValidateToken validates the JWT access token func (s *AuthService) ValidateToken(tokenString string) (*models.JWTClaims, error) { token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, errors.New("unexpected signing method") } - return []byte(s.getJWTSecret()), nil + return s.jwtSecret, nil }) if err != nil { @@ -132,6 +225,12 @@ func (s *AuthService) ValidateToken(tokenString string) (*models.JWTClaims, erro return nil, errors.New("invalid claims") } + // Check if it's an access token + tokenType, ok := claims["type"].(string) + if !ok || tokenType != "access" { + return nil, errors.New("not an access token") + } + return &models.JWTClaims{ UserID: claims["user_id"].(string), Username: claims["username"].(string), @@ -140,12 +239,6 @@ func (s *AuthService) ValidateToken(tokenString string) (*models.JWTClaims, erro }, nil } -// getJWTSecret returns the JWT secret key -func (s *AuthService) getJWTSecret() string { - // In production, this should come from environment variables - return "your-secret-key-change-this-in-production" -} - // RegisterUser registers a new user (for demo purposes) func (s *AuthService) RegisterUser(username, email, password, role string) error { if _, exists := s.users[username]; exists { @@ -167,3 +260,37 @@ func (s *AuthService) RegisterUser(username, email, password, role string) error return nil } + +// GenerateToken generates a JWT token for the given user data (public method) +func (s 
*AuthService) GenerateToken(userID, username, email, role string) (*models.TokenResponse, error) { + user := &models.User{ + ID: userID, + Username: username, + Email: email, + Role: role, + } + + // Generate access token + token, expiresIn, err := s.generateToken(user) + if err != nil { + return nil, err + } + + // Generate refresh token + refreshToken, err := s.generateRefreshToken(user) + if err != nil { + return nil, err + } + + return &models.TokenResponse{ + AccessToken: token, + RefreshToken: refreshToken, + TokenType: "Bearer", + ExpiresIn: expiresIn, + }, nil +} + +// GenerateTokenDirect generates a JWT token directly for the given user data (public method) +func (s *AuthService) GenerateTokenDirect(userID, username, email, role string) (*models.TokenResponse, error) { + return s.GenerateToken(userID, username, email, role) +} diff --git a/internal/utils/query/builder.go b/internal/utils/query/builder.go new file mode 100644 index 0000000..5f67839 --- /dev/null +++ b/internal/utils/query/builder.go @@ -0,0 +1,2325 @@ +package utils + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "strings" + "time" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +// DBType represents the type of database +type DBType string + +const ( + DBTypePostgreSQL DBType = "postgres" + DBTypeMySQL DBType = "mysql" + DBTypeSQLite DBType = "sqlite" + DBTypeSQLServer DBType = "sqlserver" + DBTypeMongoDB DBType = "mongodb" +) + +// FilterOperator represents supported filter operators +type FilterOperator string + +const ( + OpEqual FilterOperator = "_eq" + OpNotEqual FilterOperator = "_neq" + OpLike FilterOperator = "_like" + OpILike FilterOperator = "_ilike" + OpIn FilterOperator = "_in" + OpNotIn FilterOperator = "_nin" + OpGreaterThan FilterOperator = "_gt" + OpGreaterThanEqual 
FilterOperator = "_gte" + OpLessThan FilterOperator = "_lt" + OpLessThanEqual FilterOperator = "_lte" + OpBetween FilterOperator = "_between" + OpNotBetween FilterOperator = "_nbetween" + OpNull FilterOperator = "_null" + OpNotNull FilterOperator = "_nnull" + OpContains FilterOperator = "_contains" + OpNotContains FilterOperator = "_ncontains" + OpStartsWith FilterOperator = "_starts_with" + OpEndsWith FilterOperator = "_ends_with" + OpJsonContains FilterOperator = "_json_contains" + OpJsonNotContains FilterOperator = "_json_ncontains" + OpJsonExists FilterOperator = "_json_exists" + OpJsonNotExists FilterOperator = "_json_nexists" + OpJsonEqual FilterOperator = "_json_eq" + OpJsonNotEqual FilterOperator = "_json_neq" + OpArrayContains FilterOperator = "_array_contains" + OpArrayNotContains FilterOperator = "_array_ncontains" + OpArrayLength FilterOperator = "_array_length" +) + +// DynamicFilter represents a single filter condition +type DynamicFilter struct { + Column string `json:"column"` + Operator FilterOperator `json:"operator"` + Value interface{} `json:"value"` + // Additional options for complex filters + Options map[string]interface{} `json:"options,omitempty"` +} + +// FilterGroup represents a group of filters with a logical operator (AND/OR) +type FilterGroup struct { + Filters []DynamicFilter `json:"filters"` + LogicOp string `json:"logic_op"` // AND, OR +} + +// SelectField represents a field in the SELECT clause, supporting expressions and aliases +type SelectField struct { + Expression string `json:"expression"` // e.g., "TMLogBarang.Nama", "COUNT(*)" + Alias string `json:"alias"` // e.g., "obat_nama", "total_count" + // Window function support + WindowFunction *WindowFunction `json:"window_function,omitempty"` +} + +// WindowFunction represents a window function with its configuration +type WindowFunction struct { + Function string `json:"function"` // e.g., "ROW_NUMBER", "RANK", "DENSE_RANK", "LEAD", "LAG" + Over string `json:"over"` // PARTITION 
BY expression + OrderBy string `json:"order_by"` // ORDER BY expression + Frame string `json:"frame"` // ROWS/RANGE clause + Alias string `json:"alias"` // Alias for the window function +} + +// Join represents a JOIN clause +type Join struct { + Type string `json:"type"` // "INNER", "LEFT", "RIGHT", "FULL" + Table string `json:"table"` // Table name to join + Alias string `json:"alias"` // Table alias + OnConditions FilterGroup `json:"on_conditions"` // Conditions for the ON clause + // LATERAL JOIN support + Lateral bool `json:"lateral,omitempty"` +} + +// Union represents a UNION clause +type Union struct { + Type string `json:"type"` // "UNION", "UNION ALL" + Query DynamicQuery `json:"query"` // The subquery to union with +} + +// CTE (Common Table Expression) represents a WITH clause +type CTE struct { + Name string `json:"name"` // CTE alias name + Query DynamicQuery `json:"query"` // The query defining the CTE + // Recursive CTE support + Recursive bool `json:"recursive,omitempty"` +} + +// DynamicQuery represents the complete query structure +type DynamicQuery struct { + Fields []SelectField `json:"fields,omitempty"` + From string `json:"from"` // Main table name + Aliases string `json:"aliases"` // Main table alias + Joins []Join `json:"joins,omitempty"` + Filters []FilterGroup `json:"filters,omitempty"` + GroupBy []string `json:"group_by,omitempty"` + Having []FilterGroup `json:"having,omitempty"` + Unions []Union `json:"unions,omitempty"` + CTEs []CTE `json:"ctes,omitempty"` + Sort []SortField `json:"sort,omitempty"` + Limit int `json:"limit"` + Offset int `json:"offset"` + // Window function support + WindowFunctions []WindowFunction `json:"window_functions,omitempty"` + // JSON operations + JsonOperations []JsonOperation `json:"json_operations,omitempty"` +} + +// JsonOperation represents a JSON operation +type JsonOperation struct { + Type string `json:"type"` // "extract", "exists", "contains", etc. 
+ Column string `json:"column"` // JSON column + Path string `json:"path"` // JSON path + Value interface{} `json:"value,omitempty"` // Value for comparison + Alias string `json:"alias,omitempty"` // Alias for the result +} + +// SortField represents sorting configuration +type SortField struct { + Column string `json:"column"` + Order string `json:"order"` // ASC, DESC +} + +// UpdateData represents data for UPDATE operations +type UpdateData struct { + Columns []string `json:"columns"` + Values []interface{} `json:"values"` + // JSON update support + JsonUpdates map[string]JsonUpdate `json:"json_updates,omitempty"` +} + +// JsonUpdate represents a JSON update operation +type JsonUpdate struct { + Path string `json:"path"` // JSON path + Value interface{} `json:"value"` // New value +} + +// InsertData represents data for INSERT operations +type InsertData struct { + Columns []string `json:"columns"` + Values []interface{} `json:"values"` + // JSON insert support + JsonValues map[string]interface{} `json:"json_values,omitempty"` +} + +// QueryBuilder builds SQL queries from dynamic filters using squirrel +type QueryBuilder struct { + dbType DBType + sqlBuilder squirrel.StatementBuilderType + allowedColumns map[string]bool // Security: only allow specified columns + // Security settings + enableSecurityChecks bool + maxAllowedRows int + // SQL injection prevention patterns + dangerousPatterns []*regexp.Regexp +} + +// NewQueryBuilder creates a new query builder instance for a specific database type +func NewQueryBuilder(dbType DBType) *QueryBuilder { + var placeholderFormat squirrel.PlaceholderFormat + + switch dbType { + case DBTypePostgreSQL: + placeholderFormat = squirrel.Dollar + case DBTypeMySQL, DBTypeSQLite: + placeholderFormat = squirrel.Question + case DBTypeSQLServer: + placeholderFormat = squirrel.AtP + default: + placeholderFormat = squirrel.Question + } + + // Initialize dangerous patterns for SQL injection prevention + dangerousPatterns := 
[]*regexp.Regexp{
		// Keyword-based detection of injected statements.
		// NOTE(review): this pattern matches the word "select" followed by
		// whitespace, so it will also match the SELECT statement this builder
		// itself generates. If checkForSqlInjection (defined elsewhere in this
		// file) treats any match of these patterns on the final SQL as an
		// error, every query would be rejected when security checks are on —
		// confirm its matching semantics against BuildQuery's usage below.
		regexp.MustCompile(`(?i)(union|select|insert|update|delete|drop|alter|create|exec|execute)\s`),
		// SQL comment markers used to truncate/neutralize the rest of a statement.
		regexp.MustCompile(`(?i)(--|\/\*|\*\/)`),
		// Classic tautology probes ("OR 1=1", "AND true").
		regexp.MustCompile(`(?i)(or|and)\s+1\s*=\s*1`),
		regexp.MustCompile(`(?i)(or|and)\s+true`),
		regexp.MustCompile(`(?i)(xp_|sp_)\w+`),      // SQL Server extended procedures
		regexp.MustCompile(`(?i)(waitfor\s+delay)`), // SQL Server time-based attack
	}

	return &QueryBuilder{
		dbType:               dbType,
		sqlBuilder:           squirrel.StatementBuilder.PlaceholderFormat(placeholderFormat),
		allowedColumns:       make(map[string]bool),
		enableSecurityChecks: true,  // security checks default to on
		maxAllowedRows:       10000, // default cap on requested LIMIT
		dangerousPatterns:    dangerousPatterns,
	}
}

// SetSecurityOptions configures security settings: whether injection/limit
// checks run at all, and the maximum LIMIT a query may request.
// Returns the receiver for call chaining.
func (qb *QueryBuilder) SetSecurityOptions(enableChecks bool, maxRows int) *QueryBuilder {
	qb.enableSecurityChecks = enableChecks
	qb.maxAllowedRows = maxRows
	return qb
}

// SetAllowedColumns sets the list of allowed columns for security.
// Any previously configured allow-list is discarded and replaced.
// Returns the receiver for call chaining.
func (qb *QueryBuilder) SetAllowedColumns(columns []string) *QueryBuilder {
	qb.allowedColumns = make(map[string]bool)
	for _, col := range columns {
		qb.allowedColumns[col] = true
	}
	return qb
}

// BuildQuery builds the complete SQL SELECT query with support for CTEs, JOINs, and UNIONs.
// It returns the SQL text, the ordered placeholder arguments, and an error if
// any sub-clause fails validation or the security checks reject the query.
func (qb *QueryBuilder) BuildQuery(query DynamicQuery) (string, []interface{}, error) {
	var allArgs []interface{}
	var queryParts []string

	// Security check for limit: refuse requests above the configured cap.
	if qb.enableSecurityChecks && query.Limit > qb.maxAllowedRows {
		return "", nil, fmt.Errorf("requested limit %d exceeds maximum allowed %d", query.Limit, qb.maxAllowedRows)
	}

	// 1. Build CTEs (WITH clause) — emitted before the main SELECT, so their
	// args come first in allArgs.
	if len(query.CTEs) > 0 {
		cteClause, cteArgs, err := qb.buildCTEClause(query.CTEs)
		if err != nil {
			return "", nil, err
		}
		queryParts = append(queryParts, cteClause)
		allArgs = append(allArgs, cteArgs...)
	}

	// 2.
Build Main Query using Squirrel's From and Join methods + fromClause := qb.buildFromClause(query.From, query.Aliases) + selectFields := qb.buildSelectFields(query.Fields) + + // Start building the main query + var mainQuery squirrel.SelectBuilder + if len(query.WindowFunctions) > 0 || len(query.JsonOperations) > 0 { + // We need to add window functions and JSON operations after initial select + mainQuery = qb.sqlBuilder.Select(selectFields...).From(fromClause) + } else { + mainQuery = qb.sqlBuilder.Select(selectFields...).From(fromClause) + } + + // Add JOINs using Squirrel's Join method + if len(query.Joins) > 0 { + for _, join := range query.Joins { + joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join) + if err != nil { + return "", nil, err + } + joinStr := tableWithAlias + " ON " + onClause + switch strings.ToUpper(joinType) { + case "LEFT": + if join.Lateral { + mainQuery = mainQuery.LeftJoin("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.LeftJoin(joinStr, joinArgs...) + } + case "RIGHT": + mainQuery = mainQuery.RightJoin(joinStr, joinArgs...) + case "FULL": + mainQuery = mainQuery.Join("FULL JOIN "+joinStr, joinArgs...) + default: + if join.Lateral { + mainQuery = mainQuery.Join("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.Join(joinStr, joinArgs...) + } + } + } + } + + // 4. Apply WHERE conditions + if len(query.Filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Where(whereClause, whereArgs...) + } + + // 5. Apply GROUP BY + if len(query.GroupBy) > 0 { + mainQuery = mainQuery.GroupBy(qb.buildGroupByColumns(query.GroupBy)...) + } + + // 6. Apply HAVING conditions + if len(query.Having) > 0 { + havingClause, havingArgs, err := qb.BuildWhereClause(query.Having) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Having(havingClause, havingArgs...) + } + + // 7. 
Apply ORDER BY + if len(query.Sort) > 0 { + for _, sort := range query.Sort { + column := qb.validateAndEscapeColumn(sort.Column) + if column == "" { + continue + } + order := "ASC" + if strings.ToUpper(sort.Order) == "DESC" { + order = "DESC" + } + mainQuery = mainQuery.OrderBy(fmt.Sprintf("%s %s", column, order)) + } + } + + // 8. Apply window functions and JSON operations by modifying the SELECT clause + if len(query.WindowFunctions) > 0 || len(query.JsonOperations) > 0 { + // We need to rebuild the SELECT clause with window functions and JSON operations + var finalSelectFields []string + finalSelectFields = append(finalSelectFields, selectFields...) + + // Add window functions + for _, wf := range query.WindowFunctions { + windowFunc, err := qb.buildWindowFunction(wf) + if err != nil { + return "", nil, err + } + finalSelectFields = append(finalSelectFields, windowFunc) + } + + // Add JSON operations + for _, jo := range query.JsonOperations { + jsonExpr, jsonArgs, err := qb.buildJsonOperation(jo) + if err != nil { + return "", nil, err + } + if jo.Alias != "" { + jsonExpr += " AS " + qb.escapeIdentifier(jo.Alias) + } + finalSelectFields = append(finalSelectFields, jsonExpr) + allArgs = append(allArgs, jsonArgs...) + } + + // Rebuild the query with the complete SELECT clause + mainQuery = qb.sqlBuilder.Select(finalSelectFields...).From(fromClause) + + // Re-apply all the other clauses + if len(query.Joins) > 0 { + for _, join := range query.Joins { + joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join) + if err != nil { + return "", nil, err + } + joinStr := tableWithAlias + " ON " + onClause + switch strings.ToUpper(joinType) { + case "LEFT": + if join.Lateral { + mainQuery = mainQuery.LeftJoin("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.LeftJoin(joinStr, joinArgs...) + } + case "RIGHT": + mainQuery = mainQuery.RightJoin(joinStr, joinArgs...) 
+ case "FULL": + mainQuery = mainQuery.Join("FULL JOIN "+joinStr, joinArgs...) + default: + if join.Lateral { + mainQuery = mainQuery.Join("LATERAL "+joinStr, joinArgs...) + } else { + mainQuery = mainQuery.Join(joinStr, joinArgs...) + } + } + } + } + + if len(query.Filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(query.Filters) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Where(whereClause, whereArgs...) + } + + if len(query.GroupBy) > 0 { + mainQuery = mainQuery.GroupBy(qb.buildGroupByColumns(query.GroupBy)...) + } + + if len(query.Having) > 0 { + havingClause, havingArgs, err := qb.BuildWhereClause(query.Having) + if err != nil { + return "", nil, err + } + mainQuery = mainQuery.Having(havingClause, havingArgs...) + } + + if len(query.Sort) > 0 { + for _, sort := range query.Sort { + column := qb.validateAndEscapeColumn(sort.Column) + if column == "" { + continue + } + order := "ASC" + if strings.ToUpper(sort.Order) == "DESC" { + order = "DESC" + } + mainQuery = mainQuery.OrderBy(fmt.Sprintf("%s %s", column, order)) + } + } + } + + // 9. Apply pagination with dialect-specific syntax + if query.Limit > 0 { + if qb.dbType == DBTypeSQLServer { + // SQL Server requires ORDER BY for OFFSET FETCH + if len(query.Sort) == 0 { + mainQuery = mainQuery.OrderBy("(SELECT 1)") + } + mainQuery = mainQuery.Suffix(fmt.Sprintf("OFFSET %d ROWS FETCH NEXT %d ROWS ONLY", query.Offset, query.Limit)) + } else { + mainQuery = mainQuery.Limit(uint64(query.Limit)) + if query.Offset > 0 { + mainQuery = mainQuery.Offset(uint64(query.Offset)) + } + } + } else if query.Offset > 0 && qb.dbType != DBTypeSQLServer { + mainQuery = mainQuery.Offset(uint64(query.Offset)) + } + + // Build final main query SQL + sql, args, err := mainQuery.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build main query: %w", err) + } + queryParts = append(queryParts, sql) + allArgs = append(allArgs, args...) + + // 10. 
Apply UNIONs + if len(query.Unions) > 0 { + unionClause, unionArgs, err := qb.buildUnionClause(query.Unions) + if err != nil { + return "", nil, err + } + queryParts = append(queryParts, unionClause) + allArgs = append(allArgs, unionArgs...) + } + + finalSQL := strings.Join(queryParts, " ") + + // Security check for dangerous patterns + if qb.enableSecurityChecks { + if err := qb.checkForSqlInjection(finalSQL); err != nil { + return "", nil, err + } + } + + fmt.Printf("[DEBUG BuilderQuery] Final SQL query: %s\n", finalSQL) + fmt.Printf("[DEBUG] Query args: %v\n", allArgs) + return finalSQL, allArgs, nil +} + +// buildWindowFunction builds a window function expression +func (qb *QueryBuilder) buildWindowFunction(wf WindowFunction) (string, error) { + if !qb.isValidFunctionName(wf.Function) { + return "", fmt.Errorf("invalid window function name: %s", wf.Function) + } + + windowExpr := fmt.Sprintf("%s() OVER (", wf.Function) + + if wf.Over != "" { + windowExpr += fmt.Sprintf("PARTITION BY %s ", wf.Over) + } + + if wf.OrderBy != "" { + windowExpr += fmt.Sprintf("ORDER BY %s ", wf.OrderBy) + } + + if wf.Frame != "" { + windowExpr += wf.Frame + } + + windowExpr += ")" + + if wf.Alias != "" { + windowExpr += " AS " + qb.escapeIdentifier(wf.Alias) + } + + return windowExpr, nil +} + +// buildJsonOperation builds a JSON operation expression +func (qb *QueryBuilder) buildJsonOperation(jo JsonOperation) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(jo.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", jo.Column) + } + + path := jo.Path + if path == "" { + path = "$" + } + + var expr string + var args []interface{} + + switch strings.ToLower(jo.Type) { + case "extract": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s->>%s", column, qb.escapeJsonPath(path)) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s')", column, path) + case DBTypeSQLServer: + expr = 
fmt.Sprintf("JSON_VALUE(%s, '%s')", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s')", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case "exists": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NOT NULL", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') IS NOT NULL", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case "contains": + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("%s @> %s", column, "?") + args = append(args, jo.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, jo.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, jo.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path) + args = append(args, jo.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + default: + return "", nil, fmt.Errorf("unsupported JSON operation type: %s", jo.Type) + } + + return expr, args, nil +} + +// escapeJsonPath escapes a JSON path for PostgreSQL +func (qb *QueryBuilder) escapeJsonPath(path string) string { + // Simple implementation - in a real scenario, you'd need more sophisticated escaping + return "'" + strings.ReplaceAll(path, "'", "''") + "'" +} + +// escapeSqlServerJsonPath escapes a JSON path for SQL Server +func (qb *QueryBuilder) escapeSqlServerJsonPath(path string) string { + // Convert 
JSONPath to SQL Server format + // $.path.to.property -> '$.path.to.property' + if !strings.HasPrefix(path, "$") { + path = "$." + path + } + return strings.ReplaceAll(path, ".", ".") +} + +// buildCTEClause builds the WITH clause for Common Table Expressions +func (qb *QueryBuilder) buildCTEClause(ctes []CTE) (string, []interface{}, error) { + var cteParts []string + var allArgs []interface{} + + hasRecursive := false + for _, cte := range ctes { + if cte.Recursive { + hasRecursive = true + break + } + } + + withClause := "WITH" + if hasRecursive { + withClause = "WITH RECURSIVE" + } + + for _, cte := range ctes { + subQuery, args, err := qb.BuildQuery(cte.Query) + if err != nil { + return "", nil, fmt.Errorf("failed to build CTE '%s': %w", cte.Name, err) + } + cteParts = append(cteParts, fmt.Sprintf("%s AS (%s)", qb.escapeIdentifier(cte.Name), subQuery)) + allArgs = append(allArgs, args...) + } + + return fmt.Sprintf("%s %s", withClause, strings.Join(cteParts, ", ")), allArgs, nil +} + +// buildFromClause builds the FROM clause with optional alias +func (qb *QueryBuilder) buildFromClause(table, alias string) string { + fromClause := qb.escapeIdentifier(table) + if alias != "" { + fromClause += " " + qb.escapeIdentifier(alias) + } + return fromClause +} + +// buildSingleJoinClause builds a single JOIN clause components +func (qb *QueryBuilder) buildSingleJoinClause(join Join) (string, string, string, []interface{}, error) { + joinType := strings.ToUpper(join.Type) + if joinType == "" { + joinType = "INNER" + } + + table := qb.escapeIdentifier(join.Table) + if join.Alias != "" { + table += " " + qb.escapeIdentifier(join.Alias) + } + + onClause, onArgs, err := qb.BuildWhereClause([]FilterGroup{join.OnConditions}) + if err != nil { + return "", "", "", nil, fmt.Errorf("failed to build ON clause for join on table %s: %w", join.Table, err) + } + + return joinType, table, onClause, onArgs, nil +} + +// buildUnionClause builds the UNION clause +func (qb *QueryBuilder) 
buildUnionClause(unions []Union) (string, []interface{}, error) { + var unionParts []string + var allArgs []interface{} + + for _, union := range unions { + subQuery, args, err := qb.BuildQuery(union.Query) + if err != nil { + return "", nil, fmt.Errorf("failed to build subquery for UNION: %w", err) + } + unionType := strings.ToUpper(union.Type) + if unionType == "" { + unionType = "UNION" + } + unionParts = append(unionParts, fmt.Sprintf("%s %s", unionType, subQuery)) + allArgs = append(allArgs, args...) + } + + return strings.Join(unionParts, " "), allArgs, nil +} + +// buildSelectFields builds the SELECT fields from SelectField structs +func (qb *QueryBuilder) buildSelectFields(fields []SelectField) []string { + if len(fields) == 0 { + return []string{"*"} + } + + var selectedFields []string + for _, field := range fields { + expr := field.Expression + if expr == "" { + continue + } + // Basic validation for expression + if !qb.isValidExpression(expr) { + continue + } + + // Handle window functions + if field.WindowFunction != nil { + windowFunc, err := qb.buildWindowFunction(*field.WindowFunction) + if err != nil { + continue + } + expr = windowFunc + } + + if field.Alias != "" { + selectedFields = append(selectedFields, fmt.Sprintf("%s AS %s", expr, qb.escapeIdentifier(field.Alias))) + } else { + selectedFields = append(selectedFields, expr) + } + } + + if len(selectedFields) == 0 { + return []string{"*"} + } + + return selectedFields +} + +// BuildWhereClause builds WHERE/HAVING conditions from FilterGroups +func (qb *QueryBuilder) BuildWhereClause(filterGroups []FilterGroup) (string, []interface{}, error) { + if len(filterGroups) == 0 { + return "", nil, nil + } + + var conditions []string + var allArgs []interface{} + + for i, group := range filterGroups { + if len(group.Filters) == 0 { + continue + } + + groupCondition, groupArgs, err := qb.buildFilterGroup(group) + if err != nil { + return "", nil, err + } + + if groupCondition != "" { + if i > 0 { + 
logicOp := "AND" + if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + conditions = append(conditions, logicOp) + } + conditions = append(conditions, fmt.Sprintf("(%s)", groupCondition)) + allArgs = append(allArgs, groupArgs...) + } + } + + return strings.Join(conditions, " "), allArgs, nil +} + +// buildFilterGroup builds conditions for a single filter group +func (qb *QueryBuilder) buildFilterGroup(group FilterGroup) (string, []interface{}, error) { + var conditions []string + var args []interface{} + logicOp := "AND" + if group.LogicOp != "" { + logicOp = strings.ToUpper(group.LogicOp) + } + + for i, filter := range group.Filters { + condition, filterArgs, err := qb.buildFilterCondition(filter) + if err != nil { + return "", nil, err + } + + if condition != "" { + if i > 0 { + conditions = append(conditions, logicOp) + } + conditions = append(conditions, condition) + args = append(args, filterArgs...) + } + } + + return strings.Join(conditions, " "), args, nil +} + +// buildFilterCondition builds a single filter condition with dialect-specific logic +func (qb *QueryBuilder) buildFilterCondition(filter DynamicFilter) (string, []interface{}, error) { + column := qb.validateAndEscapeColumn(filter.Column) + if column == "" { + return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column) + } + + // Handle column-to-column comparison + if valStr, ok := filter.Value.(string); ok && strings.Contains(valStr, ".") && qb.isValidExpression(valStr) && len(strings.Split(valStr, ".")) == 2 { + escapedVal := qb.escapeColumnReference(valStr) + switch filter.Operator { + case OpEqual: + return fmt.Sprintf("%s = %s", column, escapedVal), nil, nil + case OpNotEqual: + return fmt.Sprintf("%s <> %s", column, escapedVal), nil, nil + case OpGreaterThan: + return fmt.Sprintf("%s > %s", column, escapedVal), nil, nil + case OpLessThan: + return fmt.Sprintf("%s < %s", column, escapedVal), nil, nil + } + } + + // Handle JSON operations + switch 
filter.Operator { + case OpJsonContains, OpJsonNotContains, OpJsonExists, OpJsonNotExists, OpJsonEqual, OpJsonNotEqual: + return qb.buildJsonFilterCondition(filter) + case OpArrayContains, OpArrayNotContains, OpArrayLength: + return qb.buildArrayFilterCondition(filter) + } + + // Handle standard operators + switch filter.Operator { + case OpEqual: + if filter.Value == nil { + return fmt.Sprintf("%s IS NULL", column), nil, nil + } + return fmt.Sprintf("%s = ?", column), []interface{}{filter.Value}, nil + case OpNotEqual: + if filter.Value == nil { + return fmt.Sprintf("%s IS NOT NULL", column), nil, nil + } + return fmt.Sprintf("%s <> ?", column), []interface{}{filter.Value}, nil + case OpLike: + if filter.Value == nil { + return "", nil, nil + } + return fmt.Sprintf("%s LIKE ?", column), []interface{}{filter.Value}, nil + case OpILike: + if filter.Value == nil { + return "", nil, nil + } + switch qb.dbType { + case DBTypePostgreSQL, DBTypeSQLite: + return fmt.Sprintf("%s ILIKE ?", column), []interface{}{filter.Value}, nil + case DBTypeMySQL, DBTypeSQLServer: + return fmt.Sprintf("LOWER(%s) LIKE LOWER(?)", column), []interface{}{filter.Value}, nil + default: + return fmt.Sprintf("%s LIKE ?", column), []interface{}{filter.Value}, nil + } + case OpIn, OpNotIn: + values := qb.parseArrayValue(filter.Value) + if len(values) == 0 { + return "1=0", nil, nil + } + op := "IN" + if filter.Operator == OpNotIn { + op = "NOT IN" + } + placeholders := squirrel.Placeholders(len(values)) + return fmt.Sprintf("%s %s (%s)", column, op, placeholders), values, nil + case OpGreaterThan, OpGreaterThanEqual, OpLessThan, OpLessThanEqual: + if filter.Value == nil { + return "", nil, nil + } + op := strings.TrimPrefix(string(filter.Operator), "_") + return fmt.Sprintf("%s %s ?", column, op), []interface{}{filter.Value}, nil + case OpBetween, OpNotBetween: + values := qb.parseArrayValue(filter.Value) + if len(values) != 2 { + return "", nil, fmt.Errorf("between operator requires exactly 2 
values")
		}
		op := "BETWEEN"
		if filter.Operator == OpNotBetween {
			op = "NOT BETWEEN"
		}
		return fmt.Sprintf("%s %s ? AND ?", column, op), []interface{}{values[0], values[1]}, nil
	case OpNull:
		return fmt.Sprintf("%s IS NULL", column), nil, nil
	case OpNotNull:
		return fmt.Sprintf("%s IS NOT NULL", column), nil, nil
	case OpContains, OpNotContains, OpStartsWith, OpEndsWith:
		// Pattern-matching operators: nil value means "no condition".
		if filter.Value == nil {
			return "", nil, nil
		}
		var value string
		switch filter.Operator {
		case OpContains, OpNotContains:
			value = fmt.Sprintf("%%%v%%", filter.Value)
		case OpStartsWith:
			value = fmt.Sprintf("%v%%", filter.Value)
		case OpEndsWith:
			value = fmt.Sprintf("%%%v", filter.Value)
		}

		// BUG FIX: operator constants are lowercase strings ("_ncontains"), so
		// the original check strings.Contains(string(filter.Operator), "Not")
		// could never match and OpNotContains silently generated LIKE instead
		// of NOT LIKE. Compare against the constant instead.
		negate := filter.Operator == OpNotContains

		switch qb.dbType {
		case DBTypePostgreSQL, DBTypeSQLite:
			// Case-insensitive match via ILIKE on dialects that support it.
			op := "ILIKE"
			if negate {
				op = "NOT ILIKE"
			}
			return fmt.Sprintf("%s %s ?", column, op), []interface{}{value}, nil
		case DBTypeMySQL, DBTypeSQLServer:
			// Emulate case-insensitivity by lowering both sides.
			op := "LIKE"
			if negate {
				op = "NOT LIKE"
			}
			return fmt.Sprintf("LOWER(%s) %s LOWER(?)", column, op), []interface{}{value}, nil
		default:
			op := "LIKE"
			if negate {
				op = "NOT LIKE"
			}
			return fmt.Sprintf("%s %s ?", column, op), []interface{}{value}, nil
		}
	default:
		return "", nil, fmt.Errorf("unsupported operator: %s", filter.Operator)
	}
}

// buildJsonFilterCondition builds a JSON filter condition.
// The JSON path defaults to "$" and may be overridden via Options["path"].
func (qb *QueryBuilder) buildJsonFilterCondition(filter DynamicFilter) (string, []interface{}, error) {
	column := qb.validateAndEscapeColumn(filter.Column)
	if column == "" {
		return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column)
	}

	path := "$"
	if pathOption, ok := filter.Options["path"].(string); ok && pathOption != "" {
		path = pathOption
	}

	var expr string
	var args []interface{}

	switch filter.Operator {
	case OpJsonContains:
		switch qb.dbType {
		case
DBTypePostgreSQL: + expr = fmt.Sprintf("%s @> ?", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonNotContains: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("NOT (%s @> ?)", column) + args = append(args, filter.Value) + case DBTypeMySQL: + expr = fmt.Sprintf("NOT JSON_CONTAINS(%s, ?, '%s')", column, path) + args = append(args, filter.Value) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') <> ?", column, qb.escapeSqlServerJsonPath(path)) + args = append(args, filter.Value) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') <> ?", column, path) + args = append(args, filter.Value) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonExists: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = fmt.Sprintf("JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path) + case DBTypeSQLServer: + expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NOT NULL", column, qb.escapeSqlServerJsonPath(path)) + case DBTypeSQLite: + expr = fmt.Sprintf("json_extract(%s, '%s') IS NOT NULL", column, path) + default: + return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType) + } + case OpJsonNotExists: + switch qb.dbType { + case DBTypePostgreSQL: + expr = fmt.Sprintf("NOT jsonb_path_exists(%s, '%s')", column, path) + case DBTypeMySQL: + expr = 
fmt.Sprintf("NOT JSON_CONTAINS_PATH(%s, 'one', '%s')", column, path)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("JSON_VALUE(%s, '%s') IS NULL", column, qb.escapeSqlServerJsonPath(path))
		case DBTypeSQLite:
			expr = fmt.Sprintf("json_extract(%s, '%s') IS NULL", column, path)
		default:
			return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType)
		}
	case OpJsonEqual:
		switch qb.dbType {
		case DBTypePostgreSQL:
			expr = fmt.Sprintf("%s->>%s = ?", column, qb.escapeJsonPath(path))
			args = append(args, filter.Value)
		case DBTypeMySQL:
			// NOTE(review): path is interpolated directly into the SQL string
			// here (the PostgreSQL branch goes through escapeJsonPath) — the
			// path must come from a trusted source.
			expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s') = ?", column, path)
			args = append(args, filter.Value)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("JSON_VALUE(%s, '%s') = ?", column, qb.escapeSqlServerJsonPath(path))
			args = append(args, filter.Value)
		case DBTypeSQLite:
			expr = fmt.Sprintf("json_extract(%s, '%s') = ?", column, path)
			args = append(args, filter.Value)
		default:
			return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType)
		}
	case OpJsonNotEqual:
		switch qb.dbType {
		case DBTypePostgreSQL:
			expr = fmt.Sprintf("%s->>%s <> ?", column, qb.escapeJsonPath(path))
			args = append(args, filter.Value)
		case DBTypeMySQL:
			expr = fmt.Sprintf("JSON_EXTRACT(%s, '%s') <> ?", column, path)
			args = append(args, filter.Value)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("JSON_VALUE(%s, '%s') <> ?", column, qb.escapeSqlServerJsonPath(path))
			args = append(args, filter.Value)
		case DBTypeSQLite:
			expr = fmt.Sprintf("json_extract(%s, '%s') <> ?", column, path)
			args = append(args, filter.Value)
		default:
			return "", nil, fmt.Errorf("JSON operations not supported for database type: %s", qb.dbType)
		}
	default:
		return "", nil, fmt.Errorf("unsupported JSON operator: %s", filter.Operator)
	}

	return expr, args, nil
}

// buildArrayFilterCondition builds a single SQL condition for array operators
// (contains, not-contains, length) using the active dialect's syntax.
// Non-PostgreSQL dialects treat the column as a JSON array.
// Returns the SQL fragment, its bind arguments, and an error for disallowed
// columns, unsupported dialects, or a missing 'length' option.
func (qb *QueryBuilder) buildArrayFilterCondition(filter DynamicFilter) (string, []interface{}, error) {
	column := qb.validateAndEscapeColumn(filter.Column)
	if column == "" {
		return "", nil, fmt.Errorf("invalid or disallowed column: %s", filter.Column)
	}

	var expr string
	var args []interface{}

	switch filter.Operator {
	case OpArrayContains:
		switch qb.dbType {
		case DBTypePostgreSQL:
			expr = fmt.Sprintf("? = ANY(%s)", column)
		case DBTypeMySQL:
			expr = fmt.Sprintf("JSON_CONTAINS(%s, JSON_QUOTE(?))", column)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("? IN (SELECT value FROM OPENJSON(%s))", column)
		case DBTypeSQLite:
			expr = fmt.Sprintf("EXISTS (SELECT 1 FROM json_each(%s) WHERE json_each.value = ?)", column)
		default:
			return "", nil, fmt.Errorf("array operations not supported for database type: %s", qb.dbType)
		}
		args = append(args, filter.Value)
	case OpArrayNotContains:
		switch qb.dbType {
		case DBTypePostgreSQL:
			expr = fmt.Sprintf("? <> ALL(%s)", column)
		case DBTypeMySQL:
			expr = fmt.Sprintf("NOT JSON_CONTAINS(%s, JSON_QUOTE(?))", column)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("? NOT IN (SELECT value FROM OPENJSON(%s))", column)
		case DBTypeSQLite:
			expr = fmt.Sprintf("NOT EXISTS (SELECT 1 FROM json_each(%s) WHERE json_each.value = ?)", column)
		default:
			return "", nil, fmt.Errorf("array operations not supported for database type: %s", qb.dbType)
		}
		args = append(args, filter.Value)
	case OpArrayLength:
		// Every dialect needs the target length, so extract it once instead of
		// repeating the assertion in each branch.
		lengthOption, ok := filter.Options["length"].(int)
		if !ok {
			return "", nil, fmt.Errorf("array_length operator requires 'length' option")
		}
		switch qb.dbType {
		case DBTypePostgreSQL:
			expr = fmt.Sprintf("array_length(%s, 1) = ?", column)
		case DBTypeMySQL:
			expr = fmt.Sprintf("JSON_LENGTH(%s) = ?", column)
		case DBTypeSQLServer:
			expr = fmt.Sprintf("(SELECT COUNT(*) FROM OPENJSON(%s)) = ?", column)
		case DBTypeSQLite:
			expr = fmt.Sprintf("json_array_length(%s) = ?", column)
		default:
			return "", nil, fmt.Errorf("array operations not supported for database type: %s", qb.dbType)
		}
		args = append(args, lengthOption)
	default:
		return "", nil, fmt.Errorf("unsupported array operator: %s", filter.Operator)
	}

	return expr, args, nil
}

// =============================================================================
// SECTION 6: EXECUTION METHODS (NEW)
// Methods for executing queries directly, with performance logging.
+// ============================================================================= + +func (qb *QueryBuilder) ExecuteQuery(ctx context.Context, db *sqlx.DB, query DynamicQuery, dest interface{}) error { + sql, args, err := qb.BuildQuery(query) + if err != nil { + return err + } + start := time.Now() + err = db.SelectContext(ctx, dest, sql, args...) + fmt.Printf("[DEBUG] Query executed in %v\n", time.Since(start)) + return err +} + +func (qb *QueryBuilder) ExecuteQueryRow(ctx context.Context, db *sqlx.DB, query DynamicQuery, dest interface{}) error { + sql, args, err := qb.BuildQuery(query) + if err != nil { + return err + } + start := time.Now() + err = db.GetContext(ctx, dest, sql, args...) + fmt.Printf("[DEBUG] QueryRow executed in %v\n", time.Since(start)) + return err +} + +func (qb *QueryBuilder) ExecuteCount(ctx context.Context, db *sqlx.DB, query DynamicQuery) (int64, error) { + sql, args, err := qb.BuildCountQuery(query) + if err != nil { + return 0, err + } + var count int64 + start := time.Now() + err = db.GetContext(ctx, &count, sql, args...) + fmt.Printf("[DEBUG] Count query executed in %v\n", time.Since(start)) + return count, err +} + +func (qb *QueryBuilder) ExecuteInsert(ctx context.Context, db *sqlx.DB, table string, data InsertData, returningColumns ...string) (sql.Result, error) { + sql, args, err := qb.BuildInsertQuery(table, data, returningColumns...) + if err != nil { + return nil, err + } + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) + fmt.Printf("[DEBUG] Insert query executed in %v\n", time.Since(start)) + return result, err +} + +func (qb *QueryBuilder) ExecuteUpdate(ctx context.Context, db *sqlx.DB, table string, updateData UpdateData, filters []FilterGroup, returningColumns ...string) (sql.Result, error) { + sql, args, err := qb.BuildUpdateQuery(table, updateData, filters, returningColumns...) + if err != nil { + return nil, err + } + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) 
+ fmt.Printf("[DEBUG] Update query executed in %v\n", time.Since(start)) + return result, err +} + +func (qb *QueryBuilder) ExecuteDelete(ctx context.Context, db *sqlx.DB, table string, filters []FilterGroup, returningColumns ...string) (sql.Result, error) { + sql, args, err := qb.BuildDeleteQuery(table, filters, returningColumns...) + if err != nil { + return nil, err + } + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) + fmt.Printf("[DEBUG] Delete query executed in %v\n", time.Since(start)) + return result, err +} + +func (qb *QueryBuilder) ExecuteUpsert(ctx context.Context, db *sqlx.DB, table string, insertData InsertData, conflictColumns []string, updateColumns []string, returningColumns ...string) (sql.Result, error) { + sql, args, err := qb.BuildUpsertQuery(table, insertData, conflictColumns, updateColumns, returningColumns...) + if err != nil { + return nil, err + } + start := time.Now() + result, err := db.ExecContext(ctx, sql, args...) + fmt.Printf("[DEBUG] Upsert query executed in %v\n", time.Since(start)) + return result, err +} + +// --- Helper and Validation Methods --- + +func (qb *QueryBuilder) buildGroupByColumns(fields []string) []string { + var groupCols []string + for _, field := range fields { + col := qb.validateAndEscapeColumn(field) + if col != "" { + groupCols = append(groupCols, col) + } + } + return groupCols +} + +func (qb *QueryBuilder) parseArrayValue(value interface{}) []interface{} { + if value == nil { + return nil + } + if reflect.TypeOf(value).Kind() == reflect.Slice { + v := reflect.ValueOf(value) + result := make([]interface{}, v.Len()) + for i := 0; i < v.Len(); i++ { + result[i] = v.Index(i).Interface() + } + return result + } + if str, ok := value.(string); ok { + if strings.Contains(str, ",") { + parts := strings.Split(str, ",") + result := make([]interface{}, len(parts)) + for i, part := range parts { + result[i] = strings.TrimSpace(part) + } + return result + } + return []interface{}{str} + } + 
	// Fallback: any other scalar becomes a single-element slice.
	return []interface{}{value}
}

// validateAndEscapeColumn checks a field name against the column allowlist and
// returns it escaped for the active dialect, or "" when rejected.
// Function-call expressions and dotted names take a different path: they are
// screened by isValidExpression only.
func (qb *QueryBuilder) validateAndEscapeColumn(field string) string {
	if field == "" {
		return ""
	}
	// Allow complex expressions like functions
	if strings.Contains(field, "(") {
		if qb.isValidExpression(field) {
			// NOTE(review): expressions bypass the allowedColumns allowlist and
			// are emitted without escaping — isValidExpression is the only gate.
			return field // Don't escape complex expressions, assume they are safe
		}
		return ""
	}
	// Handle dotted column names like "table.column"
	if strings.Contains(field, ".") {
		if qb.isValidExpression(field) {
			// NOTE(review): dotted names are not checked against allowedColumns
			// either — confirm that is intentional.
			// Split on dot and escape each part
			parts := strings.Split(field, ".")
			var escapedParts []string
			for _, part := range parts {
				escapedParts = append(escapedParts, qb.escapeIdentifier(part))
			}
			return strings.Join(escapedParts, ".")
		}
		return ""
	}
	// Simple column name
	if qb.allowedColumns != nil && !qb.allowedColumns[field] {
		return ""
	}
	return qb.escapeIdentifier(field)
}

// isValidExpression performs a character-allowlist plus keyword-blocklist
// sanity check on a column expression. It is intentionally conservative.
func (qb *QueryBuilder) isValidExpression(expr string) bool {
	// This is a simplified check. A more robust solution might use a proper SQL parser library.
	// For now, we allow alphanumeric, underscore, dots, parentheses, and common operators.
	// For SQL Server, allow brackets [] and spaces for column names.
	allowedChars := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.,() *-/[]"
	for _, r := range expr {
		if !strings.ContainsRune(allowedChars, r) {
			return false
		}
	}
	// Check for dangerous keywords
	// NOTE(review): substring matching also rejects harmless identifiers that
	// merely contain these words (e.g. "selected_at", "created_by").
	dangerousPatterns := []string{"--", "/*", "*/", "union", "select", "insert", "update", "delete", "drop", "alter", "create", "exec", "execute"}
	lowerExpr := strings.ToLower(expr)
	for _, pattern := range dangerousPatterns {
		if strings.Contains(lowerExpr, pattern) {
			return false
		}
	}
	return true
}

// isValidFunctionName reports whether name is one of the SQL functions this
// builder is willing to emit (compared case-insensitively).
func (qb *QueryBuilder) isValidFunctionName(name string) bool {
	// Check if the function name is a valid SQL function
	validFunctions := map[string]bool{
		// Aggregate functions
		"count": true, "sum": true, "avg": true, "min": true, "max": true,
		// Window functions
		"row_number": true, "rank": true, "dense_rank": true, "ntile": true,
		"lag": true, "lead": true, "first_value": true, "last_value": true,
		// JSON functions
		"json_extract": true, "json_contains": true, "json_search": true,
		"json_array": true, "json_object": true, "json_merge": true,
		// Other functions
		"concat": true, "substring": true, "upper": true, "lower": true,
		"trim": true, "coalesce": true, "nullif": true, "isnull": true,
	}

	return validFunctions[strings.ToLower(name)]
}

// escapeColumnReference escapes a possibly-dotted column reference part by
// part, leaving parts that are already bracketed ([x]) untouched.
func (qb *QueryBuilder) escapeColumnReference(col string) string {
	parts := strings.Split(col, ".")
	var escaped []string
	for _, p := range parts {
		if strings.HasPrefix(p, "[") && strings.HasSuffix(p, "]") {
			escaped = append(escaped, p)
		} else {
			escaped = append(escaped, qb.escapeIdentifier(p))
		}
	}
	return strings.Join(escaped, ".")
}

// escapeIdentifier quotes a single identifier using the active dialect's
// quoting style, doubling embedded quote characters. Unknown dialects get the
// identifier back unquoted.
func (qb *QueryBuilder) escapeIdentifier(col string) string {
	switch qb.dbType {
	case DBTypePostgreSQL, DBTypeSQLite:
		return fmt.Sprintf("\"%s\"", strings.ReplaceAll(col, "\"", "\"\""))
	case DBTypeMySQL:
		return fmt.Sprintf("`%s`", strings.ReplaceAll(col, "`", "``"))
	case DBTypeSQLServer:
		return fmt.Sprintf("[%s]", strings.ReplaceAll(col, "]", "]]"))
	default:
		return col
	}
}

// checkForSqlInjection checks for potential SQL injection patterns
// (no-op when security checks are disabled on the builder).
func (qb *QueryBuilder) checkForSqlInjection(sql string) error {
	if !qb.enableSecurityChecks {
		return nil
	}

	lowerSQL := strings.ToLower(sql)
	for _, pattern := range qb.dangerousPatterns {
		if pattern.MatchString(lowerSQL) {
			return fmt.Errorf("potential SQL injection detected: pattern %s matched", pattern.String())
		}
	}
	return nil
}

// --- Other Query Builders (Insert, Update, Delete, Upsert, Count) --- 

// BuildCountQuery builds a count query
func (qb *QueryBuilder) BuildCountQuery(query DynamicQuery) (string, []interface{}, error) {
	// For a count query, we don't need fields, joins, or unions.
	// We only need FROM, WHERE, GROUP BY, HAVING.
	countQuery := DynamicQuery{
		From:    query.From,
		Aliases: query.Aliases,
		Filters: query.Filters,
		GroupBy: query.GroupBy,
		Having:  query.Having,
		// Joins are important for count with filters on joined tables
		Joins: query.Joins,
	}

	// Build the base query for the count using Squirrel's From and Join methods
	fromClause := qb.buildFromClause(countQuery.From, countQuery.Aliases)
	baseQuery := qb.sqlBuilder.Select("COUNT(*)").From(fromClause)

	// Add JOINs using Squirrel's Join method
	if len(countQuery.Joins) > 0 {
		for _, join := range countQuery.Joins {
			joinType, tableWithAlias, onClause, joinArgs, err := qb.buildSingleJoinClause(join)
			if err != nil {
				return "", nil, err
			}
			joinStr := tableWithAlias + " ON " + onClause
			switch strings.ToUpper(joinType) {
			case "LEFT":
				baseQuery = baseQuery.LeftJoin(joinStr, joinArgs...)
			case "RIGHT":
				baseQuery = baseQuery.RightJoin(joinStr, joinArgs...)
			case "FULL":
				// Squirrel has no FullJoin helper here; spell it out.
				baseQuery = baseQuery.Join("FULL JOIN "+joinStr, joinArgs...)
			default:
				baseQuery = baseQuery.Join(joinStr, joinArgs...)
			}
		}
	}

	if len(countQuery.Filters) > 0 {
		whereClause, whereArgs, err := qb.BuildWhereClause(countQuery.Filters)
		if err != nil {
			return "", nil, err
		}
		baseQuery = baseQuery.Where(whereClause, whereArgs...)
	}

	if len(countQuery.GroupBy) > 0 {
		baseQuery = baseQuery.GroupBy(qb.buildGroupByColumns(countQuery.GroupBy)...)
	}

	if len(countQuery.Having) > 0 {
		havingClause, havingArgs, err := qb.BuildWhereClause(countQuery.Having)
		if err != nil {
			return "", nil, err
		}
		baseQuery = baseQuery.Having(havingClause, havingArgs...)
	}

	sql, args, err := baseQuery.ToSql()
	if err != nil {
		return "", nil, fmt.Errorf("failed to build COUNT query: %w", err)
	}

	// NOTE(review): prints the full SQL and bound args to stdout on every call
	// — potential data leak; consider a leveled logger.
	fmt.Printf("[DEBUG] COUNT SQL query: %s\n", sql)
	fmt.Printf("[DEBUG] COUNT query args: %v\n", args)
	return sql, args, nil
}

// BuildInsertQuery builds an INSERT query
func (qb *QueryBuilder) BuildInsertQuery(table string, data InsertData, returningColumns ...string) (string, []interface{}, error) {
	// Validate columns
	for _, col := range data.Columns {
		if qb.allowedColumns != nil && !qb.allowedColumns[col] {
			return "", nil, fmt.Errorf("disallowed column: %s", col)
		}
	}

	// Start with basic insert
	insert := qb.sqlBuilder.Insert(table).Columns(data.Columns...).Values(data.Values...)
+ + // Handle JSON values - we need to modify the insert statement + if len(data.JsonValues) > 0 { + // Create a new insert builder with all columns including JSON columns + allColumns := make([]string, len(data.Columns)) + copy(allColumns, data.Columns) + + allValues := make([]interface{}, len(data.Values)) + copy(allValues, data.Values) + + for col, val := range data.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert = qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) + } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + insert = insert.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build INSERT query: %w", err) + } + + return sql, args, nil +} + +// BuildUpdateQuery builds an UPDATE query +func (qb *QueryBuilder) BuildUpdateQuery(table string, updateData UpdateData, filters []FilterGroup, returningColumns ...string) (string, []interface{}, error) { + // Validate columns + for _, col := range updateData.Columns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + + // Start with basic update + update := qb.sqlBuilder.Update(table).SetMap(qb.buildSetMap(updateData)) + + // Handle JSON updates - we need to modify the update statement + if len(updateData.JsonUpdates) > 0 { + // Create a new set map including JSON updates + setMap := qb.buildSetMap(updateData) + + for col, jsonUpdate := range updateData.JsonUpdates { + switch qb.dbType { + case DBTypePostgreSQL: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + 
return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // Use jsonb_set function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("jsonb_set(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeMySQL: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // Use JSON_SET function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("JSON_SET(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeSQLServer: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // Use JSON_MODIFY function for updating specific paths + setMap[col] = squirrel.Expr(fmt.Sprintf("JSON_MODIFY(%s, '%s', ?)", qb.escapeIdentifier(col), jsonUpdate.Path), jsonVal) + case DBTypeSQLite: + jsonVal, err := json.Marshal(jsonUpdate.Value) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + // SQLite doesn't have a built-in JSON_SET function, so we need to use json_patch + setMap[col] = squirrel.Expr(fmt.Sprintf("json_patch(%s, ?)", qb.escapeIdentifier(col)), jsonVal) + } + } + + update = qb.sqlBuilder.Update(table).SetMap(setMap) + } + + if len(filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(filters) + if err != nil { + return "", nil, err + } + update = update.Where(whereClause, whereArgs...) 
+ } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + update = update.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := update.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPDATE query: %w", err) + } + + return sql, args, nil +} + +// buildSetMap builds a map for SetMap from UpdateData +func (qb *QueryBuilder) buildSetMap(updateData UpdateData) map[string]interface{} { + setMap := make(map[string]interface{}) + for i, col := range updateData.Columns { + setMap[col] = updateData.Values[i] + } + return setMap +} + +// BuildDeleteQuery builds a DELETE query +func (qb *QueryBuilder) BuildDeleteQuery(table string, filters []FilterGroup, returningColumns ...string) (string, []interface{}, error) { + delete := qb.sqlBuilder.Delete(table) + + if len(filters) > 0 { + whereClause, whereArgs, err := qb.BuildWhereClause(filters) + if err != nil { + return "", nil, err + } + delete = delete.Where(whereClause, whereArgs...) 
+ } + + if len(returningColumns) > 0 { + if qb.dbType == DBTypePostgreSQL { + delete = delete.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } else { + return "", nil, fmt.Errorf("RETURNING not supported for database type: %s", qb.dbType) + } + } + + sql, args, err := delete.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build DELETE query: %w", err) + } + + return sql, args, nil +} + +// BuildUpsertQuery builds an UPSERT query +func (qb *QueryBuilder) BuildUpsertQuery(table string, insertData InsertData, conflictColumns []string, updateColumns []string, returningColumns ...string) (string, []interface{}, error) { + // Validate columns + for _, col := range insertData.Columns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + for _, col := range updateColumns { + if qb.allowedColumns != nil && !qb.allowedColumns[col] { + return "", nil, fmt.Errorf("disallowed column: %s", col) + } + } + + switch qb.dbType { + case DBTypePostgreSQL: + // Handle JSON values for PostgreSQL + allColumns := make([]string, len(insertData.Columns)) + copy(allColumns, insertData.Columns) + + allValues := make([]interface{}, len(insertData.Values)) + copy(allValues, insertData.Values) + + for col, val := range insertData.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert := qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) 
+ if len(conflictColumns) > 0 { + conflictTarget := strings.Join(conflictColumns, ", ") + setClause := "" + for _, col := range updateColumns { + if setClause != "" { + setClause += ", " + } + setClause += fmt.Sprintf("%s = EXCLUDED.%s", qb.escapeIdentifier(col), qb.escapeIdentifier(col)) + } + insert = insert.Suffix(fmt.Sprintf("ON CONFLICT (%s) DO UPDATE SET %s", conflictTarget, setClause)) + } + if len(returningColumns) > 0 { + insert = insert.Suffix("RETURNING " + strings.Join(returningColumns, ", ")) + } + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPSERT query: %w", err) + } + return sql, args, nil + case DBTypeMySQL: + // Handle JSON values for MySQL + allColumns := make([]string, len(insertData.Columns)) + copy(allColumns, insertData.Columns) + + allValues := make([]interface{}, len(insertData.Values)) + copy(allValues, insertData.Values) + + for col, val := range insertData.JsonValues { + allColumns = append(allColumns, col) + jsonVal, err := json.Marshal(val) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal JSON value for column %s: %w", col, err) + } + allValues = append(allValues, jsonVal) + } + + insert := qb.sqlBuilder.Insert(table).Columns(allColumns...).Values(allValues...) 
+ if len(updateColumns) > 0 { + setClause := "" + for _, col := range updateColumns { + if setClause != "" { + setClause += ", " + } + setClause += fmt.Sprintf("%s = VALUES(%s)", qb.escapeIdentifier(col), qb.escapeIdentifier(col)) + } + insert = insert.Suffix(fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", setClause)) + } + sql, args, err := insert.ToSql() + if err != nil { + return "", nil, fmt.Errorf("failed to build UPSERT query: %w", err) + } + return sql, args, nil + default: + return "", nil, fmt.Errorf("UPSERT not supported for database type: %s", qb.dbType) + } +} + +// --- QueryParser (for parsing URL query strings) --- + +type QueryParser struct { + defaultLimit int + maxLimit int +} + +func NewQueryParser() *QueryParser { + return &QueryParser{defaultLimit: 10, maxLimit: 100} +} + +func (qp *QueryParser) SetLimits(defaultLimit, maxLimit int) *QueryParser { + qp.defaultLimit = defaultLimit + qp.maxLimit = maxLimit + return qp +} + +// ParseQuery parses URL query parameters into a DynamicQuery struct. 
// ParseQuery builds a DynamicQuery for defaultTable from URL parameters:
// fields, limit, offset, filter[col][op]=v, and sort.
func (qp *QueryParser) ParseQuery(values url.Values, defaultTable string) (DynamicQuery, error) {
	query := DynamicQuery{
		From:   defaultTable,
		Limit:  qp.defaultLimit,
		Offset: 0,
	}

	// Parse fields
	if fields := values.Get("fields"); fields != "" {
		if fields == "*" {
			query.Fields = []SelectField{{Expression: "*"}}
		} else {
			fieldList := strings.Split(fields, ",")
			for _, field := range fieldList {
				query.Fields = append(query.Fields, SelectField{Expression: strings.TrimSpace(field)})
			}
		}
	} else {
		query.Fields = []SelectField{{Expression: "*"}}
	}

	// Parse pagination
	// NOTE(review): a requested limit above maxLimit (or non-numeric/<= 0) is
	// silently replaced by the default rather than clamped to maxLimit.
	if limit := values.Get("limit"); limit != "" {
		if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= qp.maxLimit {
			query.Limit = l
		}
	}
	if offset := values.Get("offset"); offset != "" {
		if o, err := strconv.Atoi(offset); err == nil && o >= 0 {
			query.Offset = o
		}
	}

	// Parse filters
	filters, err := qp.parseFilters(values)
	if err != nil {
		return query, err
	}
	query.Filters = filters

	// Parse sorting
	sorts, err := qp.parseSorting(values)
	if err != nil {
		return query, err
	}
	query.Sort = sorts

	return query, nil
}

// parseFilters collects filter[column][operator]=value parameters into a
// single AND-joined FilterGroup. IN/NOT IN values are comma-split;
// BETWEEN expects exactly two comma-separated bounds; NULL checks carry no
// value. Returns nil when no filter parameters are present.
func (qp *QueryParser) parseFilters(values url.Values) ([]FilterGroup, error) {
	filterMap := make(map[string]map[string]string)
	for key, vals := range values {
		if strings.HasPrefix(key, "filter[") && strings.HasSuffix(key, "]") {
			// key looks like filter[column][operator]; strip the wrapper and
			// split on "][".
			parts := strings.Split(key[7:len(key)-1], "][")
			if len(parts) == 2 {
				column, operator := parts[0], parts[1]
				if filterMap[column] == nil {
					filterMap[column] = make(map[string]string)
				}
				if len(vals) > 0 {
					filterMap[column][operator] = vals[0]
				}
			}
		}
	}
	if len(filterMap) == 0 {
		return nil, nil
	}
	var filters []DynamicFilter
	for column, operators := range filterMap {
		for opStr, value := range operators {
			operator := FilterOperator(opStr)
			var parsedValue interface{}
			switch operator {
			case OpIn, OpNotIn:
				if value != "" {
					parsedValue = strings.Split(value, ",")
				}
			case OpBetween, OpNotBetween:
				if value != "" {
					parts := strings.Split(value, ",")
					if len(parts) == 2 {
						parsedValue = []interface{}{strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])}
					}
				}
			case OpNull, OpNotNull:
				parsedValue = nil
			default:
				parsedValue = value
			}
			filters = append(filters, DynamicFilter{Column: column, Operator: operator, Value: parsedValue})
		}
	}
	if len(filters) == 0 {
		return nil, nil
	}
	return []FilterGroup{{Filters: filters, LogicOp: "AND"}}, nil
}

// parseSorting parses the comma-separated "sort" parameter: a leading "-"
// means DESC, a leading "+" (or nothing) means ASC.
func (qp *QueryParser) parseSorting(values url.Values) ([]SortField, error) {
	sortParam := values.Get("sort")
	if sortParam == "" {
		return nil, nil
	}
	var sorts []SortField
	fields := strings.Split(sortParam, ",")
	for _, field := range fields {
		field = strings.TrimSpace(field)
		if field == "" {
			continue
		}
		order, column := "ASC", field
		if strings.HasPrefix(field, "-") {
			order = "DESC"
			column = field[1:]
		} else if strings.HasPrefix(field, "+") {
			column = field[1:]
		}
		sorts = append(sorts, SortField{Column: column, Order: order})
	}
	return sorts, nil
}

// =============================================================================
// MONGODB QUERY BUILDER
// =============================================================================

// MongoQueryBuilder builds MongoDB queries from dynamic filters
type MongoQueryBuilder struct {
	allowedFields map[string]bool // Security: only allow specified fields
	// Security settings
	enableSecurityChecks bool
	maxAllowedDocs       int
}

// NewMongoQueryBuilder creates a new MongoDB query builder instance
// (security checks on, document cap 10000, empty field allowlist).
func NewMongoQueryBuilder() *MongoQueryBuilder {
	return &MongoQueryBuilder{
		allowedFields:        make(map[string]bool),
		enableSecurityChecks: true,
		maxAllowedDocs:       10000,
	}
}

// SetSecurityOptions configures security settings; returns mqb for chaining.
func (mqb *MongoQueryBuilder) SetSecurityOptions(enableChecks bool, maxDocs int) *MongoQueryBuilder {
	mqb.enableSecurityChecks = enableChecks
	mqb.maxAllowedDocs = maxDocs
	return mqb
}

// SetAllowedFields sets the list of allowed fields for security,
// replacing any previous allowlist; returns mqb for chaining.
func (mqb *MongoQueryBuilder) SetAllowedFields(fields []string) *MongoQueryBuilder {
	mqb.allowedFields = make(map[string]bool)
	for _, field := range fields {
		mqb.allowedFields[field] = true
	}
	return mqb
}

// BuildFindQuery builds a MongoDB find filter plus FindOptions (projection,
// sort, limit, skip) from a DynamicQuery, enforcing the field allowlist and
// the maximum document limit.
func (mqb *MongoQueryBuilder) BuildFindQuery(query DynamicQuery) (bson.M, *options.FindOptions, error) {
	filter := bson.M{}
	findOptions := options.Find()

	// Security check for limit
	if mqb.enableSecurityChecks && query.Limit > mqb.maxAllowedDocs {
		return nil, nil, fmt.Errorf("requested limit %d exceeds maximum allowed %d", query.Limit, mqb.maxAllowedDocs)
	}

	// Build filter from DynamicQuery filters
	if len(query.Filters) > 0 {
		mongoFilter, err := mqb.buildFilter(query.Filters)
		if err != nil {
			return nil, nil, err
		}
		filter = mongoFilter
	}

	// Set projection from fields
	if len(query.Fields) > 0 {
		projection := bson.M{}
		for _, field := range query.Fields {
			if field.Expression == "*" {
				// Include all fields
				continue
			}
			fieldName := field.Expression
			if field.Alias != "" {
				// NOTE(review): when an alias is set, the alias (not the
				// underlying expression) is both allowlist-checked and
				// projected — confirm that is intended.
				fieldName = field.Alias
			}
			if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] {
				return nil, nil, fmt.Errorf("disallowed field: %s", fieldName)
			}
			projection[fieldName] = 1
		}
		if len(projection) > 0 {
			findOptions.SetProjection(projection)
		}
	}

	// Set sort
	if len(query.Sort) > 0 {
		sort := bson.D{}
		for _, sortField := range query.Sort {
			fieldName := sortField.Column
			if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] {
				return nil, nil, fmt.Errorf("disallowed field: %s", fieldName)
			}
			order := 1 // ASC
			if strings.ToUpper(sortField.Order) == "DESC" {
				order = -1 // DESC
			}
			sort = append(sort, bson.E{Key: fieldName, Value: order})
		}
		findOptions.SetSort(sort)
	}
+ + // Set limit and offset + if query.Limit > 0 { + findOptions.SetLimit(int64(query.Limit)) + } + if query.Offset > 0 { + findOptions.SetSkip(int64(query.Offset)) + } + + return filter, findOptions, nil +} + +// BuildAggregateQuery builds a MongoDB aggregation pipeline from DynamicQuery +func (mqb *MongoQueryBuilder) BuildAggregateQuery(query DynamicQuery) ([]bson.D, error) { + pipeline := []bson.D{} + + // Handle CTEs as stages in the pipeline + if len(query.CTEs) > 0 { + for _, cte := range query.CTEs { + subPipeline, err := mqb.BuildAggregateQuery(cte.Query) + if err != nil { + return nil, fmt.Errorf("failed to build CTE '%s': %w", cte.Name, err) + } + // Add $lookup stage for joins + if len(cte.Query.Joins) > 0 { + for _, join := range cte.Query.Joins { + lookupStage := bson.D{ + {Key: "$lookup", Value: bson.D{ + {Key: "from", Value: join.Table}, + {Key: "localField", Value: join.Alias}, + {Key: "foreignField", Value: "_id"}, + {Key: "as", Value: join.Alias}, + }}, + } + pipeline = append(pipeline, lookupStage) + } + } + // Add the sub-pipeline + pipeline = append(pipeline, subPipeline...) 
+ } + } + + // Match stage for filters + if len(query.Filters) > 0 { + filter, err := mqb.buildFilter(query.Filters) + if err != nil { + return nil, err + } + pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) + } + + // Group stage for GROUP BY + if len(query.GroupBy) > 0 { + groupID := bson.D{} + for _, field := range query.GroupBy { + if mqb.allowedFields != nil && !mqb.allowedFields[field] { + return nil, fmt.Errorf("disallowed field: %s", field) + } + groupID = append(groupID, bson.E{Key: field, Value: "$" + field}) + } + + groupStage := bson.D{ + {Key: "$group", Value: bson.D{ + {Key: "_id", Value: groupID}, + }}, + } + + // Add any aggregations from fields + for _, field := range query.Fields { + if strings.Contains(field.Expression, "(") && strings.Contains(field.Expression, ")") { + // This is an aggregation function + funcName := strings.Split(field.Expression, "(")[0] + funcField := strings.TrimSuffix(strings.Split(field.Expression, "(")[1], ")") + + if mqb.allowedFields != nil && !mqb.allowedFields[funcField] { + return nil, fmt.Errorf("disallowed field: %s", funcField) + } + + switch strings.ToLower(funcName) { + case "count": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$sum", Value: 1}}, + }) + case "sum": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$sum", Value: "$" + funcField}}, + }) + case "avg": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$avg", Value: "$" + funcField}}, + }) + case "min": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$min", Value: "$" + funcField}}, + }) + case "max": + groupStage = append(groupStage, bson.E{ + Key: field.Alias, Value: bson.D{{Key: "$max", Value: "$" + funcField}}, + }) + } + } + } + + pipeline = append(pipeline, groupStage) + } + + // Sort stage + if len(query.Sort) > 0 { + sort := bson.D{} + for _, sortField := range query.Sort { + fieldName 
:= sortField.Column + if mqb.allowedFields != nil && !mqb.allowedFields[fieldName] { + return nil, fmt.Errorf("disallowed field: %s", fieldName) + } + order := 1 // ASC + if strings.ToUpper(sortField.Order) == "DESC" { + order = -1 // DESC + } + sort = append(sort, bson.E{Key: fieldName, Value: order}) + } + pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) + } + + // Skip and limit stages + if query.Offset > 0 { + pipeline = append(pipeline, bson.D{{Key: "$skip", Value: query.Offset}}) + } + if query.Limit > 0 { + pipeline = append(pipeline, bson.D{{Key: "$limit", Value: query.Limit}}) + } + + return pipeline, nil +} + +// buildFilter builds a MongoDB filter from FilterGroups +func (mqb *MongoQueryBuilder) buildFilter(filterGroups []FilterGroup) (bson.M, error) { + if len(filterGroups) == 0 { + return bson.M{}, nil + } + + var result bson.M + var err error + + for i, group := range filterGroups { + if len(group.Filters) == 0 { + continue + } + + groupFilter, err := mqb.buildFilterGroup(group) + if err != nil { + return nil, err + } + + if i == 0 { + result = groupFilter + } else { + logicOp := "$and" + if group.LogicOp != "" { + switch strings.ToUpper(group.LogicOp) { + case "OR": + logicOp = "$or" + } + } + result = bson.M{logicOp: []bson.M{result, groupFilter}} + } + } + + return result, err +} + +// buildFilterGroup builds a filter for a single filter group +func (mqb *MongoQueryBuilder) buildFilterGroup(group FilterGroup) (bson.M, error) { + var filters []bson.M + logicOp := "$and" + if group.LogicOp != "" { + switch strings.ToUpper(group.LogicOp) { + case "OR": + logicOp = "$or" + } + } + + for _, filter := range group.Filters { + fieldFilter, err := mqb.buildFilterCondition(filter) + if err != nil { + return nil, err + } + filters = append(filters, fieldFilter) + } + + if len(filters) == 1 { + return filters[0], nil + } + return bson.M{logicOp: filters}, nil +} + +// buildFilterCondition builds a single filter condition for MongoDB +func (mqb 
*MongoQueryBuilder) buildFilterCondition(filter DynamicFilter) (bson.M, error) { + field := filter.Column + if mqb.allowedFields != nil && !mqb.allowedFields[field] { + return nil, fmt.Errorf("disallowed field: %s", field) + } + + switch filter.Operator { + case OpEqual: + return bson.M{field: filter.Value}, nil + case OpNotEqual: + return bson.M{field: bson.M{"$ne": filter.Value}}, nil + case OpIn: + values := mqb.parseArrayValue(filter.Value) + return bson.M{field: bson.M{"$in": values}}, nil + case OpNotIn: + values := mqb.parseArrayValue(filter.Value) + return bson.M{field: bson.M{"$nin": values}}, nil + case OpGreaterThan: + return bson.M{field: bson.M{"$gt": filter.Value}}, nil + case OpGreaterThanEqual: + return bson.M{field: bson.M{"$gte": filter.Value}}, nil + case OpLessThan: + return bson.M{field: bson.M{"$lt": filter.Value}}, nil + case OpLessThanEqual: + return bson.M{field: bson.M{"$lte": filter.Value}}, nil + case OpLike: + // Convert SQL LIKE to MongoDB regex + pattern := filter.Value.(string) + pattern = strings.ReplaceAll(pattern, "%", ".*") + pattern = strings.ReplaceAll(pattern, "_", ".") + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpILike: + // Case-insensitive like + pattern := filter.Value.(string) + pattern = strings.ReplaceAll(pattern, "%", ".*") + pattern = strings.ReplaceAll(pattern, "_", ".") + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpContains: + // Contains substring + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$regex": pattern, "$options": "i"}}, nil + case OpNotContains: + // Does not contain substring + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$not": bson.M{"$regex": pattern, "$options": "i"}}}, nil + case OpStartsWith: + // Starts with + pattern := filter.Value.(string) + return bson.M{field: bson.M{"$regex": "^" + pattern, "$options": "i"}}, nil + case OpEndsWith: + // Ends with + pattern := filter.Value.(string) + 
return bson.M{field: bson.M{"$regex": pattern + "$", "$options": "i"}}, nil + case OpNull: + return bson.M{field: bson.M{"$exists": false}}, nil + case OpNotNull: + return bson.M{field: bson.M{"$exists": true}}, nil + case OpJsonContains: + // JSON contains + return bson.M{field: bson.M{"$elemMatch": filter.Value}}, nil + case OpJsonNotContains: + // JSON does not contain + return bson.M{field: bson.M{"$not": bson.M{"$elemMatch": filter.Value}}}, nil + case OpJsonExists: + // JSON path exists + return bson.M{field + "." + filter.Options["path"].(string): bson.M{"$exists": true}}, nil + case OpJsonNotExists: + // JSON path does not exist + return bson.M{field + "." + filter.Options["path"].(string): bson.M{"$exists": false}}, nil + case OpArrayContains: + // Array contains + return bson.M{field: bson.M{"$elemMatch": bson.M{"$eq": filter.Value}}}, nil + case OpArrayNotContains: + // Array does not contain + return bson.M{field: bson.M{"$not": bson.M{"$elemMatch": bson.M{"$eq": filter.Value}}}}, nil + case OpArrayLength: + // Array length + if lengthOption, ok := filter.Options["length"].(int); ok { + return bson.M{field: bson.M{"$size": lengthOption}}, nil + } + return nil, fmt.Errorf("array_length operator requires 'length' option") + default: + return nil, fmt.Errorf("unsupported operator: %s", filter.Operator) + } +} + +// parseArrayValue parses an array value for MongoDB +func (mqb *MongoQueryBuilder) parseArrayValue(value interface{}) []interface{} { + if value == nil { + return nil + } + if reflect.TypeOf(value).Kind() == reflect.Slice { + v := reflect.ValueOf(value) + result := make([]interface{}, v.Len()) + for i := 0; i < v.Len(); i++ { + result[i] = v.Index(i).Interface() + } + return result + } + if str, ok := value.(string); ok { + if strings.Contains(str, ",") { + parts := strings.Split(str, ",") + result := make([]interface{}, len(parts)) + for i, part := range parts { + result[i] = strings.TrimSpace(part) + } + return result + } + return 
[]interface{}{str} + } + return []interface{}{value} +} + +// ExecuteFind executes a MongoDB find query +func (mqb *MongoQueryBuilder) ExecuteFind(ctx context.Context, collection *mongo.Collection, query DynamicQuery, dest interface{}) error { + filter, findOptions, err := mqb.BuildFindQuery(query) + if err != nil { + return err + } + start := time.Now() + cursor, err := collection.Find(ctx, filter, findOptions) + if err != nil { + return err + } + defer cursor.Close(ctx) + err = cursor.All(ctx, dest) + fmt.Printf("[DEBUG] MongoDB Find executed in %v\n", time.Since(start)) + return err +} + +// ExecuteAggregate executes a MongoDB aggregation pipeline +func (mqb *MongoQueryBuilder) ExecuteAggregate(ctx context.Context, collection *mongo.Collection, query DynamicQuery, dest interface{}) error { + pipeline, err := mqb.BuildAggregateQuery(query) + if err != nil { + return err + } + start := time.Now() + cursor, err := collection.Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + err = cursor.All(ctx, dest) + fmt.Printf("[DEBUG] MongoDB Aggregate executed in %v\n", time.Since(start)) + return err +} + +// ExecuteCount executes a MongoDB count query +func (mqb *MongoQueryBuilder) ExecuteCount(ctx context.Context, collection *mongo.Collection, query DynamicQuery) (int64, error) { + filter, _, err := mqb.BuildFindQuery(query) + if err != nil { + return 0, err + } + start := time.Now() + count, err := collection.CountDocuments(ctx, filter) + fmt.Printf("[DEBUG] MongoDB Count executed in %v\n", time.Since(start)) + return count, err +} + +// ExecuteInsert executes a MongoDB insert operation +func (mqb *MongoQueryBuilder) ExecuteInsert(ctx context.Context, collection *mongo.Collection, data InsertData) (*mongo.InsertOneResult, error) { + document := bson.M{} + for i, col := range data.Columns { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + document[col] = 
data.Values[i] + } + + // Handle JSON values + for col, val := range data.JsonValues { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + document[col] = val + } + + start := time.Now() + result, err := collection.InsertOne(ctx, document) + fmt.Printf("[DEBUG] MongoDB Insert executed in %v\n", time.Since(start)) + return result, err +} + +// ExecuteUpdate executes a MongoDB update operation +func (mqb *MongoQueryBuilder) ExecuteUpdate(ctx context.Context, collection *mongo.Collection, updateData UpdateData, filters []FilterGroup) (*mongo.UpdateResult, error) { + filter, err := mqb.buildFilter(filters) + if err != nil { + return nil, err + } + + update := bson.M{"$set": bson.M{}} + for i, col := range updateData.Columns { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + update["$set"].(bson.M)[col] = updateData.Values[i] + } + + // Handle JSON updates + for col, jsonUpdate := range updateData.JsonUpdates { + if mqb.allowedFields != nil && !mqb.allowedFields[col] { + return nil, fmt.Errorf("disallowed field: %s", col) + } + // Use dot notation for nested JSON updates + update["$set"].(bson.M)[col+"."+jsonUpdate.Path] = jsonUpdate.Value + } + + start := time.Now() + result, err := collection.UpdateMany(ctx, filter, update) + fmt.Printf("[DEBUG] MongoDB Update executed in %v\n", time.Since(start)) + return result, err +} + +// ExecuteDelete executes a MongoDB delete operation +func (mqb *MongoQueryBuilder) ExecuteDelete(ctx context.Context, collection *mongo.Collection, filters []FilterGroup) (*mongo.DeleteResult, error) { + filter, err := mqb.buildFilter(filters) + if err != nil { + return nil, err + } + + start := time.Now() + result, err := collection.DeleteMany(ctx, filter) + fmt.Printf("[DEBUG] MongoDB Delete executed in %v\n", time.Since(start)) + return result, err +} diff --git 
a/internal/utils/validation/duplicate_validator.go b/internal/utils/validation/duplicate_validator.go index 863c058..01d18f8 100644 --- a/internal/utils/validation/duplicate_validator.go +++ b/internal/utils/validation/duplicate_validator.go @@ -2,140 +2,228 @@ package validation import ( "context" - "database/sql" "fmt" - "time" + + queryUtils "api-service/internal/utils/query" + + "github.com/jmoiron/sqlx" ) -// ValidationConfig holds configuration for duplicate validation -type ValidationConfig struct { - TableName string - IDColumn string - StatusColumn string - DateColumn string - ActiveStatuses []string - AdditionalFields map[string]interface{} +// ============================================================================= +// DYNAMIC VALIDATION RULE +// ============================================================================= + +// ValidationRule mendefinisikan aturan untuk memeriksa duplikat atau kondisi lain. +// Struct ini membuat validator dapat digunakan kembali untuk tabel apa pun. +type ValidationRule struct { + // TableName adalah nama tabel yang akan diperiksa. + TableName string + + // UniqueColumns adalah daftar kolom yang, jika digabungkan, harus unik. + // Contoh: []string{"email"} atau []string{"first_name", "last_name", "dob"} + UniqueColumns []string + + // Conditions adalah filter tambahan yang harus dipenuhi. + // Ini sangat berguna untuk aturan bisnis, seperti "status != 'deleted'". + // Gunakan queryUtils.DynamicFilter untuk fleksibilitas penuh. + Conditions []queryUtils.DynamicFilter + + // ExcludeIDColumn dan ExcludeIDValue digunakan untuk operasi UPDATE, + // untuk memastikan bahwa record tidak membandingkan dirinya sendiri. + ExcludeIDColumn string + ExcludeIDValue interface{} } -// DuplicateValidator provides methods for validating duplicate entries -type DuplicateValidator struct { - db *sql.DB +// NewUniqueFieldRule adalah helper untuk membuat aturan validasi unik untuk satu kolom. 
+// Ini adalah cara cepat untuk membuat aturan yang paling umum. +func NewUniqueFieldRule(tableName, uniqueColumn string, additionalConditions ...queryUtils.DynamicFilter) ValidationRule { + return ValidationRule{ + TableName: tableName, + UniqueColumns: []string{uniqueColumn}, + Conditions: additionalConditions, + } } -// NewDuplicateValidator creates a new instance of DuplicateValidator -func NewDuplicateValidator(db *sql.DB) *DuplicateValidator { - return &DuplicateValidator{db: db} +// ============================================================================= +// DYNAMIC VALIDATOR +// ============================================================================= + +// DynamicValidator menyediakan metode untuk menjalankan validasi berdasarkan ValidationRule. +// Ini sepenuhnya generik dan tidak terikat pada tabel atau model tertentu. +type DynamicValidator struct { + qb *queryUtils.QueryBuilder } -// ValidateDuplicate checks for duplicate entries based on the provided configuration -func (dv *DuplicateValidator) ValidateDuplicate(ctx context.Context, config ValidationConfig, identifier interface{}) error { - query := fmt.Sprintf(` - SELECT COUNT(*) - FROM %s - WHERE %s = $1 - AND %s = ANY($2) - AND DATE(%s) = CURRENT_DATE - `, config.TableName, config.IDColumn, config.StatusColumn, config.DateColumn) - - var count int - err := dv.db.QueryRowContext(ctx, query, identifier, config.ActiveStatuses).Scan(&count) - if err != nil { - return fmt.Errorf("failed to check duplicate: %w", err) - } - - if count > 0 { - return fmt.Errorf("data with ID %v already exists with active status today", identifier) - } - - return nil +// NewDynamicValidator membuat instance DynamicValidator baru. 
+func NewDynamicValidator(qb *queryUtils.QueryBuilder) *DynamicValidator { + return &DynamicValidator{qb: qb} } -// ValidateDuplicateWithCustomFields checks for duplicates with additional custom fields -func (dv *DuplicateValidator) ValidateDuplicateWithCustomFields(ctx context.Context, config ValidationConfig, fields map[string]interface{}) error { - whereClause := fmt.Sprintf("%s = ANY($1) AND DATE(%s) = CURRENT_DATE", config.StatusColumn, config.DateColumn) - args := []interface{}{config.ActiveStatuses} - argIndex := 2 - - // Add additional field conditions - for fieldName, fieldValue := range config.AdditionalFields { - whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex) - args = append(args, fieldValue) - argIndex++ +// Validate menjalankan validasi terhadap aturan yang diberikan. +// `data` adalah map yang berisi nilai untuk kolom yang akan diperiksa (biasanya dari request body). +// Mengembalikan `true` jika ada duplikat yang ditemukan (validasi gagal), `false` jika tidak ada duplikat (validasi berhasil). +func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule ValidationRule, data map[string]interface{}) (bool, error) { + if len(rule.UniqueColumns) == 0 { + return false, fmt.Errorf("ValidationRule must have at least one UniqueColumn") } - // Add dynamic fields - for fieldName, fieldValue := range fields { - whereClause += fmt.Sprintf(" AND %s = $%d", fieldName, argIndex) - args = append(args, fieldValue) - argIndex++ - } + // 1. Kumpulkan semua filter dari aturan + var allFilters []queryUtils.DynamicFilter - query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", config.TableName, whereClause) + // Tambahkan kondisi tambahan (misalnya, status != 'deleted') + allFilters = append(allFilters, rule.Conditions...) 
- var count int - err := dv.db.QueryRowContext(ctx, query, args...).Scan(&count) - if err != nil { - return fmt.Errorf("failed to check duplicate with custom fields: %w", err) - } - - if count > 0 { - return fmt.Errorf("duplicate entry found with the specified criteria") - } - - return nil -} - -// ValidateOncePerDay ensures only one submission per day for a given identifier -func (dv *DuplicateValidator) ValidateOncePerDay(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) error { - query := fmt.Sprintf(` - SELECT COUNT(*) - FROM %s - WHERE %s = $1 - AND DATE(%s) = CURRENT_DATE - `, tableName, idColumn, dateColumn) - - var count int - err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&count) - if err != nil { - return fmt.Errorf("failed to check daily submission: %w", err) - } - - if count > 0 { - return fmt.Errorf("only one submission allowed per day for ID %v", identifier) - } - - return nil -} - -// GetLastSubmissionTime returns the last submission time for a given identifier -func (dv *DuplicateValidator) GetLastSubmissionTime(ctx context.Context, tableName, idColumn, dateColumn string, identifier interface{}) (*time.Time, error) { - query := fmt.Sprintf(` - SELECT %s - FROM %s - WHERE %s = $1 - ORDER BY %s DESC - LIMIT 1 - `, dateColumn, tableName, idColumn, dateColumn) - - var lastTime time.Time - err := dv.db.QueryRowContext(ctx, query, identifier).Scan(&lastTime) - if err != nil { - if err == sql.ErrNoRows { - return nil, nil // No previous submission + // 2. Bangun filter untuk kolom unik berdasarkan data yang diberikan + for _, colName := range rule.UniqueColumns { + value, exists := data[colName] + if !exists { + // Jika data untuk kolom unik tidak ada, ini adalah kesalahan pemrograman. 
+ return false, fmt.Errorf("data for unique column '%s' not found in provided data map", colName) } - return nil, fmt.Errorf("failed to get last submission time: %w", err) + allFilters = append(allFilters, queryUtils.DynamicFilter{ + Column: colName, + Operator: queryUtils.OpEqual, + Value: value, + }) } - return &lastTime, nil + // 3. Tambahkan filter pengecualian ID (untuk operasi UPDATE) + if rule.ExcludeIDColumn != "" { + allFilters = append(allFilters, queryUtils.DynamicFilter{ + Column: rule.ExcludeIDColumn, + Operator: queryUtils.OpNotEqual, + Value: rule.ExcludeIDValue, + }) + } + + // 4. Bangun dan eksekusi query untuk menghitung jumlah record yang cocok + query := queryUtils.DynamicQuery{ + From: rule.TableName, + Filters: []queryUtils.FilterGroup{{Filters: allFilters, LogicOp: "AND"}}, + } + + count, err := dv.qb.ExecuteCount(ctx, db, query) + if err != nil { + return false, fmt.Errorf("failed to execute validation query for table %s: %w", rule.TableName, err) + } + + // 5. Kembalikan hasil + return count > 0, nil } -// DefaultRetribusiConfig returns default configuration for retribusi validation -func DefaultRetribusiConfig() ValidationConfig { - return ValidationConfig{ - TableName: "data_retribusi", - IDColumn: "id", - StatusColumn: "status", - DateColumn: "date_created", - ActiveStatuses: []string{"active", "draft"}, - } +// ============================================================================= +// CONTOH PENGGUNAAN (UNTUK DITEMPATKAN DI HANDLER ANDA) +// ============================================================================= + +/* +// --- Cara Penggunaan di RetribusiHandler --- + +// 1. Tambahkan DynamicValidator ke struct handler +type RetribusiHandler struct { + // ... + validator *validation.DynamicValidator } + +// 2. Inisialisasi di constructor +func NewRetribusiHandler() *RetribusiHandler { + qb := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL).SetAllowedColumns(...) + + return &RetribusiHandler{ + // ... 
+ validator: validation.NewDynamicValidator(qb), + } +} + +// 3. Gunakan di CreateRetribusi +func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { + var req retribusi.RetribusiCreateRequest + // ... bind dan validasi request ... + + // Siapkan aturan validasi: KodeTarif harus unik di antara record yang tidak dihapus. + rule := validation.NewUniqueFieldRule( + "data_retribusi", // Nama tabel + "Kode_tarif", // Kolom yang harus unik + queryUtils.DynamicFilter{ // Kondisi tambahan + Column: "status", + Operator: queryUtils.OpNotEqual, + Value: "deleted", + }, + ) + + // Siapkan data dari request untuk divalidasi + dataToValidate := map[string]interface{}{ + "Kode_tarif": req.KodeTarif, + } + + // Eksekusi validasi + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate Kode Tarif", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "Kode Tarif already exists", fmt.Errorf("duplicate Kode Tarif: %s", req.KodeTarif), http.StatusConflict) + return + } + + // ... lanjutkan proses create ... +} + +// 4. Gunakan di UpdateRetribusi +func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { + id := c.Param("id") + var req retribusi.RetribusiUpdateRequest + // ... bind dan validasi request ... + + // Siapkan aturan validasi: KodeTarif harus unik, kecuali untuk record dengan ID ini. 
+ rule := validation.ValidationRule{ + TableName: "data_retribusi", + UniqueColumns: []string{"Kode_tarif"}, + Conditions: []queryUtils.DynamicFilter{ + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + ExcludeIDColumn: "id", // Kecualikan berdasarkan kolom 'id' + ExcludeIDValue: id, // ...dengan nilai ID dari parameter + } + + dataToValidate := map[string]interface{}{ + "Kode_tarif": req.KodeTarif, + } + + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate Kode Tarif", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "Kode Tarif already exists", fmt.Errorf("duplicate Kode Tarif: %s", req.KodeTarif), http.StatusConflict) + return + } + + // ... lanjutkan proses update ... +} + +// --- Contoh Penggunaan untuk Kasus Lain --- + +// Contoh: Validasi kombinasi unik untuk tabel 'users' +// (email dan company_id harus unik bersama-sama) +func (h *UserHandler) CreateUser(c *gin.Context) { + // ... + + rule := validation.ValidationRule{ + TableName: "users", + UniqueColumns: []string{"email", "company_id"}, // Unik komposit + } + + dataToValidate := map[string]interface{}{ + "email": req.Email, + "company_id": req.CompanyID, + } + + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + // ... 
handle error dan duplicate +} + +*/ diff --git a/tools/general/services-config.yaml b/tools/general/services-config.yaml index 3811f61..b647c33 100644 --- a/tools/general/services-config.yaml +++ b/tools/general/services-config.yaml @@ -5,266 +5,6 @@ global: enable_logging: true services: - retribusi: - name: "Retribusi" - category: "retribusi" - package: "retribusi" - description: "Retribusi service for tariff and billing management" - base_url: "" - timeout: 30 - retry_count: 3 - - endpoints: - retribusi: - description: "Retribusi tariff management" - handler_folder: "retribusi" - handler_file: "retribusi.go" - handler_name: "Retribusi" - table_name: "data_retribusi" - functions: - list: - methods: ["GET"] - path: "/" - get_routes: "/" - get_path: "/" - model: "Retribusi" - response_model: "RetribusiGetResponse" - description: "Get retribusi list with pagination and filters" - summary: "Get Retribusi List" - tags: ["Retribusi"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - has_pagination: true - has_filter: true - has_search: true - has_stats: true - - get: - methods: ["GET"] - path: "/:id" - get_routes: "/:id" - get_path: "/:id" - model: "Retribusi" - response_model: "RetribusiGetByIDResponse" - description: "Get retribusi by ID" - summary: "Get Retribusi by ID" - tags: ["Retribusi"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - - dynamic: - methods: ["GET"] - path: "/dynamic" - get_routes: "/dynamic" - get_path: "/dynamic" - model: "Retribusi" - response_model: "RetribusiGetResponse" - description: "Get retribusi with dynamic filtering" - summary: "Get Retribusi Dynamic" - tags: ["Retribusi"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - has_dynamic: true - - search: - methods: ["GET"] - path: "/search" - get_routes: "/search" - get_path: "/search" - model: "Retribusi" - response_model: "RetribusiGetResponse" - description: "Search retribusi" 
- summary: "Search Retribusi" - tags: ["Retribusi"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - has_search: true - - create: - methods: ["POST"] - path: "/" - post_routes: "/" - post_path: "/" - model: "RetribusiCreateRequest" - response_model: "RetribusiCreateResponse" - request_model: "RetribusiCreateRequest" - description: "Create new retribusi" - summary: "Create Retribusi" - tags: ["Retribusi"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - update: - methods: ["PUT"] - path: "/:id" - put_routes: "/:id" - put_path: "/:id" - model: "RetribusiUpdateRequest" - response_model: "RetribusiUpdateResponse" - request_model: "RetribusiUpdateRequest" - description: "Update retribusi" - summary: "Update Retribusi" - tags: ["Retribusi"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - delete: - methods: ["DELETE"] - path: "/:id" - delete_routes: "/:id" - delete_path: "/:id" - model: "Retribusi" - response_model: "RetribusiDeleteResponse" - description: "Delete retribusi" - summary: "Delete Retribusi" - tags: ["Retribusi"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - stats: - methods: ["GET"] - path: "/stats" - get_routes: "/stats" - get_path: "/stats" - model: "AggregateData" - response_model: "AggregateData" - description: "Get retribusi statistics" - summary: "Get Retribusi Stats" - tags: ["Retribusi"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 180 - has_stats: true - - # Example of another service - user: - name: "User" - category: "user" - package: "user" - description: "User management service" - base_url: "" - timeout: 30 - retry_count: 3 - - endpoints: - user: - description: "User management endpoints" - handler_folder: "retribusi" - handler_file: "user.go" - handler_name: "User" - table_name: "data_user" - functions: - list: - methods: ["GET"] - path: "/" - get_routes: 
"/" - get_path: "/" - model: "User" - response_model: "UserGetResponse" - description: "Get user list with pagination" - summary: "Get User List" - tags: ["User"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - has_pagination: true - has_filter: true - has_search: true - - get: - methods: ["GET"] - path: "/:id" - get_routes: "/:id" - get_path: "/:id" - model: "User" - response_model: "UserGetByIDResponse" - description: "Get user by ID" - summary: "Get User by ID" - tags: ["User"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - - create: - methods: ["POST"] - path: "/" - post_routes: "/" - post_path: "/" - model: "UserCreateRequest" - response_model: "UserCreateResponse" - request_model: "UserCreateRequest" - description: "Create new user" - summary: "Create User" - tags: ["User"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - update: - methods: ["PUT"] - path: "/:id" - put_routes: "/:id" - put_path: "/:id" - model: "UserUpdateRequest" - response_model: "UserUpdateResponse" - request_model: "UserUpdateRequest" - description: "Update user" - summary: "Update User" - tags: ["User"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - delete: - methods: ["DELETE"] - path: "/:id" - delete_routes: "/:id" - delete_path: "/:id" - model: "User" - response_model: "UserDeleteResponse" - description: "Delete user" - summary: "Delete User" - tags: ["User"] - require_auth: true - cache_enabled: false - enable_database: true - cache_ttl: 0 - - search: - methods: ["GET"] - path: "/search" - get_routes: "/search" - get_path: "/search" - model: "User" - response_model: "UserGetResponse" - description: "Search user" - summary: "Search User" - tags: ["User"] - require_auth: true - cache_enabled: true - enable_database: true - cache_ttl: 300 - has_search: true - schedule: name: "Jadwal Dokter" category: "schedule"