Update: major changes to the query builder

Author: meninjar
Date: 2025-11-02 03:08:38 +00:00
Parent: 0002cf26be
Commit: 19324041b8
13 changed files with 2916 additions and 842 deletions

View File

@@ -12,8 +12,7 @@ RUN go build -o main cmd/api/main.go
 FROM alpine:3.20.1 AS prod
 WORKDIR /app
 COPY --from=build /app/main /app/main
-COPY --from=build /app/.env /app/.env
-EXPOSE 8080
+EXPOSE 8010
 CMD ["./main"]
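With the .env file no longer copied into the production image, configuration has to come from the container environment (see the docker-compose changes below). A minimal sketch of that pattern using github.com/joho/godotenv, which is already a direct dependency in go.mod; the PORT key and its fallback are assumptions, only the 8010 port comes from the Dockerfile:

package main

import (
	"log"
	"os"

	"github.com/joho/godotenv"
)

// getEnv returns the value of key or a fallback when it is unset.
func getEnv(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

func main() {
	// In local development a .env file may still exist; in the container the
	// variables are injected by docker-compose, so a missing file is fine.
	if err := godotenv.Load(); err != nil {
		log.Println("no .env file found, relying on process environment")
	}
	port := getEnv("PORT", "8010") // the prod stage now exposes 8010
	log.Println("listening on :" + port)
}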

View File

@@ -15,15 +15,24 @@ services:
 GIN_MODE: release
 JWT_SECRET: goRSSA@jay@2025
 # Default Database Configuration (PostgreSQL)
-DB_CONNECTION: postgres
-DB_USERNAME: stim
-DB_PASSWORD: stim*RS54
-DB_HOST: 10.10.123.165
-DB_DATABASE: satu_db
-DB_PORT: 5432
-DB_SSLMODE: disable
-# satudata Database Configuration (PostgreSQL)
+# DB_CONNECTION: postgres
+# DB_USERNAME: stim
+# DB_PASSWORD: stim*RS54
+# DB_HOST: 10.10.123.165
+# DB_DATABASE: satu_db
+# DB_PORT: 5432
+# DB_SSLMODE: disable
+# SIMRS Database Configuration (PostgreSQL)
+POSTGRES_SIMRS_CONNECTION: postgres
+POSTGRES_SIMRS_USERNAME: brawijaya
+POSTGRES_SIMRS_PASSWORD: ub*2025
+POSTGRES_SIMRS_HOST: 10.10.123.238
+POSTGRES_SIMRS_DATABASE: simrs
+POSTGRES_SIMRS_PORT: 5432
+POSTGRES_SIMRS_SSLMODE: disable
+# SATUDATA Database Configuration (PostgreSQL)
 POSTGRES_SATUDATA_CONNECTION: postgres
 POSTGRES_SATUDATA_USERNAME: stim
 POSTGRES_SATUDATA_PASSWORD: stim*RS54
@@ -33,14 +42,14 @@ services:
 POSTGRES_SATUDATA_SSLMODE: disable
 # Mongo Database
-MONGODB_MONGOHL7_CONNECTION: mongodb
-MONGODB_MONGOHL7_HOST: 10.10.123.206
-MONGODB_MONGOHL7_PORT: 27017
-MONGODB_MONGOHL7_USER: admin
-MONGODB_MONGOHL7_PASS: stim*rs54
-MONGODB_MONGOHL7_MASTER: master
-MONGODB_MONGOHL7_LOCAL: local
-MONGODB_MONGOHL7_SSLMODE: disable
+# MONGODB_MONGOHL7_CONNECTION: mongodb
+# MONGODB_MONGOHL7_HOST: 10.10.123.206
+# MONGODB_MONGOHL7_PORT: 27017
+# MONGODB_MONGOHL7_USER: admin
+# MONGODB_MONGOHL7_PASS: stim*rs54
+# MONGODB_MONGOHL7_MASTER: master
+# MONGODB_MONGOHL7_LOCAL: local
+# MONGODB_MONGOHL7_SSLMODE: disable
 # MYSQL Antrian Database
 # MYSQL_ANTRIAN_CONNECTION: mysql
@@ -52,21 +61,21 @@ services:
 # MYSQL_ANTRIAN_SSLMODE: disable
 # MYSQL Medical Database
-MYSQL_MEDICAL_CONNECTION: mysql
-MYSQL_MEDICAL_HOST: 10.10.123.163
-MYSQL_MEDICAL_USERNAME: meninjardev
-MYSQL_MEDICAL_PASSWORD: meninjar*RS54
-MYSQL_MEDICAL_DATABASE: healtcare_database
-MYSQL_MEDICAL_PORT: 3306
-MYSQL_MEDICAL_SSLMODE: disable
-# Keycloak Configuration
+# MYSQL_MEDICAL_CONNECTION: mysql
+# MYSQL_MEDICAL_HOST: 10.10.123.163
+# MYSQL_MEDICAL_USERNAME: meninjardev
+# MYSQL_MEDICAL_PASSWORD: meninjar*RS54
+# MYSQL_MEDICAL_DATABASE: healtcare_database
+# MYSQL_MEDICAL_PORT: 3306
+# MYSQL_MEDICAL_SSLMODE: disable
+# KEYCLOAK Configuration
 KEYCLOAK_ISSUER: https://auth.rssa.top/realms/sandbox
 KEYCLOAK_AUDIENCE: nuxtsim-pendaftaran
 KEYCLOAK_JWKS_URL: https://auth.rssa.top/realms/sandbox/protocol/openid-connect/certs
-KEYCLOAK_ENABLED: true
+KEYCLOAK_ENABLED: "true"
-# Auth Configuration
+# AUTH Configuration
 AUTH_TYPE: hybrid
 AUTH_STATIC_TOKENS: token5,token6,token7,token8
 AUTH_FALLBACK_TO: jwt
@@ -88,21 +97,21 @@ services:
 BRIDGING_SATUSEHAT_KFA_URL: https://api-satusehat.kemkes.go.id/kfa-v2
 # Swagger Configuration
-SWAGGER_TITLE: My Custom API Service
+SWAGGER_TITLE: General API Service
 SWAGGER_DESCRIPTION: This is a custom API service for managing various resources
 SWAGGER_VERSION: 2.0.0
 SWAGGER_CONTACT_NAME: Support Team
-SWAGGER_HOST: api.mycompany.com:8080
+SWAGGER_HOST: meninjar.dev.rssa.id:8010
 SWAGGER_BASE_PATH: /api/v2
 SWAGGER_SCHEMES: https
 # API Configuration
-API_TITLE: API Service UJICOBA
+API_TITLE: API Service General
 API_DESCRIPTION: Dokumentation SWAGGER
 API_VERSION: 3.0.0
 # Security
-SECURITY_TRUSTED_ORIGINS: http://meninjar.dev.rssa.id:8050,https://yourdomain.com
+SECURITY_TRUSTED_ORIGINS: http://meninjar.dev.rssa.id:8010,https://yourdomain.com
 SECURITY_MAX_INPUT_LENGTH: 500
 RATE_LIMIT_REQUESTS_PER_MINUTE: 120
 REDIS_HOST: localhost
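The environment now defines a second PostgreSQL source (SIMRS) alongside SATUDATA, while the default DB_*, MongoDB, and MySQL blocks are commented out. A hedged sketch of turning the POSTGRES_SIMRS_* variables into a libpq-style DSN; only the variable names come from the compose file, the function itself is hypothetical:

package config

import (
	"fmt"
	"os"
)

// simrsDSN assembles a PostgreSQL connection string from the
// POSTGRES_SIMRS_* variables defined in docker-compose.
func simrsDSN() string {
	return fmt.Sprintf(
		"host=%s port=%s user=%s password=%s dbname=%s sslmode=%s",
		os.Getenv("POSTGRES_SIMRS_HOST"),
		os.Getenv("POSTGRES_SIMRS_PORT"),
		os.Getenv("POSTGRES_SIMRS_USERNAME"),
		os.Getenv("POSTGRES_SIMRS_PASSWORD"),
		os.Getenv("POSTGRES_SIMRS_DATABASE"),
		os.Getenv("POSTGRES_SIMRS_SSLMODE"),
	)
}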

View File

@@ -1,5 +1,4 @@
// Code generated by swaggo/swag. DO NOT EDIT. // Package docs Code generated by swaggo/swag. DO NOT EDIT
package docs package docs
import "github.com/swaggo/swag" import "github.com/swaggo/swag"
@@ -45,7 +44,7 @@ const docTemplate = `{
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/models.LoginRequest" "$ref": "#/definitions/api-service_internal_models_auth.LoginRequest"
} }
} }
], ],
@@ -53,7 +52,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -96,7 +95,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.User" "$ref": "#/definitions/api-service_internal_models_auth.User"
} }
}, },
"401": { "401": {
@@ -142,7 +141,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -241,25 +240,25 @@ const docTemplate = `{
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetByIDResponse"
} }
}, },
"400": { "400": {
"description": "Invalid ID format", "description": "Invalid ID format",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -290,7 +289,7 @@ const docTemplate = `{
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiUpdateRequest" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiUpdateRequest"
} }
} }
], ],
@@ -298,25 +297,25 @@ const docTemplate = `{
"200": { "200": {
"description": "Retribusi updated successfully", "description": "Retribusi updated successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiUpdateResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiUpdateResponse"
} }
}, },
"400": { "400": {
"description": "Bad request or validation error", "description": "Bad request or validation error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -346,25 +345,25 @@ const docTemplate = `{
"200": { "200": {
"description": "Retribusi deleted successfully", "description": "Retribusi deleted successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiDeleteResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiDeleteResponse"
} }
}, },
"400": { "400": {
"description": "Invalid ID format", "description": "Invalid ID format",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -434,19 +433,19 @@ const docTemplate = `{
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse"
} }
}, },
"400": { "400": {
"description": "Bad request", "description": "Bad request",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -470,7 +469,7 @@ const docTemplate = `{
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiCreateRequest" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiCreateRequest"
} }
} }
], ],
@@ -478,19 +477,19 @@ const docTemplate = `{
"201": { "201": {
"description": "Retribusi created successfully", "description": "Retribusi created successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiCreateResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiCreateResponse"
} }
}, },
"400": { "400": {
"description": "Bad request or validation error", "description": "Bad request or validation error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -547,19 +546,19 @@ const docTemplate = `{
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse"
} }
}, },
"400": { "400": {
"description": "Bad request", "description": "Bad request",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -590,13 +589,13 @@ const docTemplate = `{
"200": { "200": {
"description": "Statistics data", "description": "Statistics data",
"schema": { "schema": {
"$ref": "#/definitions/models.AggregateData" "$ref": "#/definitions/api-service_internal_models.AggregateData"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -604,7 +603,7 @@ const docTemplate = `{
}, },
"/api/v1/token/generate": { "/api/v1/token/generate": {
"post": { "post": {
"description": "Generate a JWT token for a user", "description": "Generate a JWT token for testing purposes",
"consumes": [ "consumes": [
"application/json" "application/json"
], ],
@@ -617,12 +616,13 @@ const docTemplate = `{
"summary": "Generate JWT token", "summary": "Generate JWT token",
"parameters": [ "parameters": [
{ {
"description": "User credentials", "description": "Token generation data",
"name": "token", "name": "token",
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/models.LoginRequest" "type": "object",
"additionalProperties": true
} }
} }
], ],
@@ -630,7 +630,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -641,22 +641,13 @@ const docTemplate = `{
"type": "string" "type": "string"
} }
} }
},
"401": {
"description": "Unauthorized",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
} }
} }
} }
}, },
"/api/v1/token/generate-direct": { "/api/v1/token/generate-direct": {
"post": { "post": {
"description": "Generate a JWT token directly without password verification (for testing)", "description": "Generate a JWT token directly with provided data",
"consumes": [ "consumes": [
"application/json" "application/json"
], ],
@@ -666,18 +657,16 @@ const docTemplate = `{
"tags": [ "tags": [
"Token" "Token"
], ],
"summary": "Generate token directly", "summary": "Generate JWT token directly",
"parameters": [ "parameters": [
{ {
"description": "User info", "description": "Token generation data",
"name": "user", "name": "token",
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"type": "object", "type": "object",
"additionalProperties": { "additionalProperties": true
"type": "string"
}
} }
} }
], ],
@@ -685,7 +674,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -702,7 +691,7 @@ const docTemplate = `{
} }
}, },
"definitions": { "definitions": {
"models.AggregateData": { "api-service_internal_models.AggregateData": {
"type": "object", "type": "object",
"properties": { "properties": {
"by_dinas": { "by_dinas": {
@@ -743,7 +732,7 @@ const docTemplate = `{
} }
} }
}, },
"models.ErrorResponse": { "api-service_internal_models.ErrorResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"code": { "code": {
@@ -760,22 +749,7 @@ const docTemplate = `{
} }
} }
}, },
"models.LoginRequest": { "api-service_internal_models.MetaResponse": {
"type": "object",
"required": [
"password",
"username"
],
"properties": {
"password": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"models.MetaResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"current_page": { "current_page": {
@@ -801,7 +775,7 @@ const docTemplate = `{
} }
} }
}, },
"models.NullableInt32": { "api-service_internal_models.NullableInt32": {
"type": "object", "type": "object",
"properties": { "properties": {
"int32": { "int32": {
@@ -812,7 +786,7 @@ const docTemplate = `{
} }
} }
}, },
"models.NullableString": { "api-service_internal_models.NullableString": {
"type": "object", "type": "object",
"properties": { "properties": {
"string": { "string": {
@@ -823,7 +797,7 @@ const docTemplate = `{
} }
} }
}, },
"models.NullableTime": { "api-service_internal_models.NullableTime": {
"type": "object", "type": "object",
"properties": { "properties": {
"time": { "time": {
@@ -834,21 +808,41 @@ const docTemplate = `{
} }
} }
}, },
"models.TokenResponse": { "api-service_internal_models_auth.LoginRequest": {
"type": "object",
"required": [
"password",
"username"
],
"properties": {
"password": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"api-service_internal_models_auth.TokenResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"access_token": { "access_token": {
"type": "string" "type": "string"
}, },
"expires_in": { "expires_in": {
"description": "Durasi dalam detik",
"type": "integer" "type": "integer"
}, },
"refresh_token": {
"type": "string"
},
"token_type": { "token_type": {
"description": "Biasanya \"Bearer\"",
"type": "string" "type": "string"
} }
} }
}, },
"models.User": { "api-service_internal_models_auth.User": {
"type": "object", "type": "object",
"properties": { "properties": {
"email": { "email": {
@@ -865,75 +859,75 @@ const docTemplate = `{
} }
} }
}, },
"retribusi.Retribusi": { "api-service_internal_models_retribusi.Retribusi": {
"type": "object", "type": "object",
"properties": { "properties": {
"date_created": { "date_created": {
"$ref": "#/definitions/models.NullableTime" "$ref": "#/definitions/api-service_internal_models.NullableTime"
}, },
"date_updated": { "date_updated": {
"$ref": "#/definitions/models.NullableTime" "$ref": "#/definitions/api-service_internal_models.NullableTime"
}, },
"dinas": { "dinas": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"id": { "id": {
"type": "string" "type": "string"
}, },
"jenis": { "jenis": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"kelompok_obyek": { "kelompok_obyek": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"kode_tarif": { "kode_tarif": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"pelayanan": { "pelayanan": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"rekening_denda": { "rekening_denda": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"rekening_pokok": { "rekening_pokok": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"satuan": { "satuan": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"satuan_overtime": { "satuan_overtime": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"sort": { "sort": {
"$ref": "#/definitions/models.NullableInt32" "$ref": "#/definitions/api-service_internal_models.NullableInt32"
}, },
"status": { "status": {
"type": "string" "type": "string"
}, },
"tarif": { "tarif": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"tarif_overtime": { "tarif_overtime": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_1": { "uraian_1": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_2": { "uraian_2": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_3": { "uraian_3": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"user_created": { "user_created": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"user_updated": { "user_updated": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
} }
} }
}, },
"retribusi.RetribusiCreateRequest": { "api-service_internal_models_retribusi.RetribusiCreateRequest": {
"type": "object", "type": "object",
"required": [ "required": [
"status" "status"
@@ -1009,18 +1003,18 @@ const docTemplate = `{
} }
} }
}, },
"retribusi.RetribusiCreateResponse": { "api-service_internal_models_retribusi.RetribusiCreateResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"
} }
} }
}, },
"retribusi.RetribusiDeleteResponse": { "api-service_internal_models_retribusi.RetribusiDeleteResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"id": { "id": {
@@ -1031,38 +1025,38 @@ const docTemplate = `{
} }
} }
}, },
"retribusi.RetribusiGetByIDResponse": { "api-service_internal_models_retribusi.RetribusiGetByIDResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"
} }
} }
}, },
"retribusi.RetribusiGetResponse": { "api-service_internal_models_retribusi.RetribusiGetResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"type": "array", "type": "array",
"items": { "items": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
} }
}, },
"message": { "message": {
"type": "string" "type": "string"
}, },
"meta": { "meta": {
"$ref": "#/definitions/models.MetaResponse" "$ref": "#/definitions/api-service_internal_models.MetaResponse"
}, },
"summary": { "summary": {
"$ref": "#/definitions/models.AggregateData" "$ref": "#/definitions/api-service_internal_models.AggregateData"
} }
} }
}, },
"retribusi.RetribusiUpdateRequest": { "api-service_internal_models_retribusi.RetribusiUpdateRequest": {
"type": "object", "type": "object",
"required": [ "required": [
"status" "status"
@@ -1138,11 +1132,11 @@ const docTemplate = `{
} }
} }
}, },
"retribusi.RetribusiUpdateResponse": { "api-service_internal_models_retribusi.RetribusiUpdateResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"
@@ -1162,6 +1156,8 @@ var SwaggerInfo = &swag.Spec{
Description: "A comprehensive Go API service with Swagger documentation", Description: "A comprehensive Go API service with Swagger documentation",
InfoInstanceName: "swagger", InfoInstanceName: "swagger",
SwaggerTemplate: docTemplate, SwaggerTemplate: docTemplate,
LeftDelim: "{{",
RightDelim: "}}",
} }
func init() { func init() {
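The regenerated definitions above also extend the token response with refresh_token and field descriptions. Read back from the Swagger schema, the corresponding Go model would look roughly like this; a sketch inferred from the JSON properties (the api-service_internal_models_auth prefix suggests internal/models/auth), not the actual source:

package auth

// TokenResponse mirrors the api-service_internal_models_auth.TokenResponse
// definition in the generated Swagger document.
type TokenResponse struct {
	AccessToken  string `json:"access_token"`
	ExpiresIn    int    `json:"expires_in"`    // "Durasi dalam detik" (duration in seconds)
	RefreshToken string `json:"refresh_token"` // newly documented field
	TokenType    string `json:"token_type"`    // Biasanya "Bearer" (usually "Bearer")
}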

View File

@@ -42,7 +42,7 @@
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/models.LoginRequest" "$ref": "#/definitions/api-service_internal_models_auth.LoginRequest"
} }
} }
], ],
@@ -50,7 +50,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -93,7 +93,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.User" "$ref": "#/definitions/api-service_internal_models_auth.User"
} }
}, },
"401": { "401": {
@@ -139,7 +139,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -238,25 +238,25 @@
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetByIDResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetByIDResponse"
} }
}, },
"400": { "400": {
"description": "Invalid ID format", "description": "Invalid ID format",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -287,7 +287,7 @@
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiUpdateRequest" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiUpdateRequest"
} }
} }
], ],
@@ -295,25 +295,25 @@
"200": { "200": {
"description": "Retribusi updated successfully", "description": "Retribusi updated successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiUpdateResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiUpdateResponse"
} }
}, },
"400": { "400": {
"description": "Bad request or validation error", "description": "Bad request or validation error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -343,25 +343,25 @@
"200": { "200": {
"description": "Retribusi deleted successfully", "description": "Retribusi deleted successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiDeleteResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiDeleteResponse"
} }
}, },
"400": { "400": {
"description": "Invalid ID format", "description": "Invalid ID format",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"404": { "404": {
"description": "Retribusi not found", "description": "Retribusi not found",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -431,19 +431,19 @@
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse"
} }
}, },
"400": { "400": {
"description": "Bad request", "description": "Bad request",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -467,7 +467,7 @@
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiCreateRequest" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiCreateRequest"
} }
} }
], ],
@@ -475,19 +475,19 @@
"201": { "201": {
"description": "Retribusi created successfully", "description": "Retribusi created successfully",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiCreateResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiCreateResponse"
} }
}, },
"400": { "400": {
"description": "Bad request or validation error", "description": "Bad request or validation error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -544,19 +544,19 @@
"200": { "200": {
"description": "Success response", "description": "Success response",
"schema": { "schema": {
"$ref": "#/definitions/retribusi.RetribusiGetResponse" "$ref": "#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse"
} }
}, },
"400": { "400": {
"description": "Bad request", "description": "Bad request",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -587,13 +587,13 @@
"200": { "200": {
"description": "Statistics data", "description": "Statistics data",
"schema": { "schema": {
"$ref": "#/definitions/models.AggregateData" "$ref": "#/definitions/api-service_internal_models.AggregateData"
} }
}, },
"500": { "500": {
"description": "Internal server error", "description": "Internal server error",
"schema": { "schema": {
"$ref": "#/definitions/models.ErrorResponse" "$ref": "#/definitions/api-service_internal_models.ErrorResponse"
} }
} }
} }
@@ -601,7 +601,7 @@
}, },
"/api/v1/token/generate": { "/api/v1/token/generate": {
"post": { "post": {
"description": "Generate a JWT token for a user", "description": "Generate a JWT token for testing purposes",
"consumes": [ "consumes": [
"application/json" "application/json"
], ],
@@ -614,12 +614,13 @@
"summary": "Generate JWT token", "summary": "Generate JWT token",
"parameters": [ "parameters": [
{ {
"description": "User credentials", "description": "Token generation data",
"name": "token", "name": "token",
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"$ref": "#/definitions/models.LoginRequest" "type": "object",
"additionalProperties": true
} }
} }
], ],
@@ -627,7 +628,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -638,22 +639,13 @@
"type": "string" "type": "string"
} }
} }
},
"401": {
"description": "Unauthorized",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
} }
} }
} }
}, },
"/api/v1/token/generate-direct": { "/api/v1/token/generate-direct": {
"post": { "post": {
"description": "Generate a JWT token directly without password verification (for testing)", "description": "Generate a JWT token directly with provided data",
"consumes": [ "consumes": [
"application/json" "application/json"
], ],
@@ -663,18 +655,16 @@
"tags": [ "tags": [
"Token" "Token"
], ],
"summary": "Generate token directly", "summary": "Generate JWT token directly",
"parameters": [ "parameters": [
{ {
"description": "User info", "description": "Token generation data",
"name": "user", "name": "token",
"in": "body", "in": "body",
"required": true, "required": true,
"schema": { "schema": {
"type": "object", "type": "object",
"additionalProperties": { "additionalProperties": true
"type": "string"
}
} }
} }
], ],
@@ -682,7 +672,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.TokenResponse" "$ref": "#/definitions/api-service_internal_models_auth.TokenResponse"
} }
}, },
"400": { "400": {
@@ -699,7 +689,7 @@
} }
}, },
"definitions": { "definitions": {
"models.AggregateData": { "api-service_internal_models.AggregateData": {
"type": "object", "type": "object",
"properties": { "properties": {
"by_dinas": { "by_dinas": {
@@ -740,7 +730,7 @@
} }
} }
}, },
"models.ErrorResponse": { "api-service_internal_models.ErrorResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"code": { "code": {
@@ -757,22 +747,7 @@
} }
} }
}, },
"models.LoginRequest": { "api-service_internal_models.MetaResponse": {
"type": "object",
"required": [
"password",
"username"
],
"properties": {
"password": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"models.MetaResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"current_page": { "current_page": {
@@ -798,7 +773,7 @@
} }
} }
}, },
"models.NullableInt32": { "api-service_internal_models.NullableInt32": {
"type": "object", "type": "object",
"properties": { "properties": {
"int32": { "int32": {
@@ -809,7 +784,7 @@
} }
} }
}, },
"models.NullableString": { "api-service_internal_models.NullableString": {
"type": "object", "type": "object",
"properties": { "properties": {
"string": { "string": {
@@ -820,7 +795,7 @@
} }
} }
}, },
"models.NullableTime": { "api-service_internal_models.NullableTime": {
"type": "object", "type": "object",
"properties": { "properties": {
"time": { "time": {
@@ -831,21 +806,41 @@
} }
} }
}, },
"models.TokenResponse": { "api-service_internal_models_auth.LoginRequest": {
"type": "object",
"required": [
"password",
"username"
],
"properties": {
"password": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"api-service_internal_models_auth.TokenResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"access_token": { "access_token": {
"type": "string" "type": "string"
}, },
"expires_in": { "expires_in": {
"description": "Durasi dalam detik",
"type": "integer" "type": "integer"
}, },
"refresh_token": {
"type": "string"
},
"token_type": { "token_type": {
"description": "Biasanya \"Bearer\"",
"type": "string" "type": "string"
} }
} }
}, },
"models.User": { "api-service_internal_models_auth.User": {
"type": "object", "type": "object",
"properties": { "properties": {
"email": { "email": {
@@ -862,75 +857,75 @@
} }
} }
}, },
"retribusi.Retribusi": { "api-service_internal_models_retribusi.Retribusi": {
"type": "object", "type": "object",
"properties": { "properties": {
"date_created": { "date_created": {
"$ref": "#/definitions/models.NullableTime" "$ref": "#/definitions/api-service_internal_models.NullableTime"
}, },
"date_updated": { "date_updated": {
"$ref": "#/definitions/models.NullableTime" "$ref": "#/definitions/api-service_internal_models.NullableTime"
}, },
"dinas": { "dinas": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"id": { "id": {
"type": "string" "type": "string"
}, },
"jenis": { "jenis": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"kelompok_obyek": { "kelompok_obyek": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"kode_tarif": { "kode_tarif": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"pelayanan": { "pelayanan": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"rekening_denda": { "rekening_denda": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"rekening_pokok": { "rekening_pokok": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"satuan": { "satuan": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"satuan_overtime": { "satuan_overtime": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"sort": { "sort": {
"$ref": "#/definitions/models.NullableInt32" "$ref": "#/definitions/api-service_internal_models.NullableInt32"
}, },
"status": { "status": {
"type": "string" "type": "string"
}, },
"tarif": { "tarif": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"tarif_overtime": { "tarif_overtime": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_1": { "uraian_1": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_2": { "uraian_2": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"uraian_3": { "uraian_3": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"user_created": { "user_created": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
}, },
"user_updated": { "user_updated": {
"$ref": "#/definitions/models.NullableString" "$ref": "#/definitions/api-service_internal_models.NullableString"
} }
} }
}, },
"retribusi.RetribusiCreateRequest": { "api-service_internal_models_retribusi.RetribusiCreateRequest": {
"type": "object", "type": "object",
"required": [ "required": [
"status" "status"
@@ -1006,18 +1001,18 @@
} }
} }
}, },
"retribusi.RetribusiCreateResponse": { "api-service_internal_models_retribusi.RetribusiCreateResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"
} }
} }
}, },
"retribusi.RetribusiDeleteResponse": { "api-service_internal_models_retribusi.RetribusiDeleteResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"id": { "id": {
@@ -1028,38 +1023,38 @@
} }
} }
}, },
"retribusi.RetribusiGetByIDResponse": { "api-service_internal_models_retribusi.RetribusiGetByIDResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"
} }
} }
}, },
"retribusi.RetribusiGetResponse": { "api-service_internal_models_retribusi.RetribusiGetResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"type": "array", "type": "array",
"items": { "items": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
} }
}, },
"message": { "message": {
"type": "string" "type": "string"
}, },
"meta": { "meta": {
"$ref": "#/definitions/models.MetaResponse" "$ref": "#/definitions/api-service_internal_models.MetaResponse"
}, },
"summary": { "summary": {
"$ref": "#/definitions/models.AggregateData" "$ref": "#/definitions/api-service_internal_models.AggregateData"
} }
} }
}, },
"retribusi.RetribusiUpdateRequest": { "api-service_internal_models_retribusi.RetribusiUpdateRequest": {
"type": "object", "type": "object",
"required": [ "required": [
"status" "status"
@@ -1135,11 +1130,11 @@
} }
} }
}, },
"retribusi.RetribusiUpdateResponse": { "api-service_internal_models_retribusi.RetribusiUpdateResponse": {
"type": "object", "type": "object",
"properties": { "properties": {
"data": { "data": {
"$ref": "#/definitions/retribusi.Retribusi" "$ref": "#/definitions/api-service_internal_models_retribusi.Retribusi"
}, },
"message": { "message": {
"type": "string" "type": "string"

View File

@@ -1,6 +1,6 @@
basePath: /api/v1 basePath: /api/v1
definitions: definitions:
models.AggregateData: api-service_internal_models.AggregateData:
properties: properties:
by_dinas: by_dinas:
additionalProperties: additionalProperties:
@@ -27,7 +27,7 @@ definitions:
updated_today: updated_today:
type: integer type: integer
type: object type: object
models.ErrorResponse: api-service_internal_models.ErrorResponse:
properties: properties:
code: code:
type: integer type: integer
@@ -38,17 +38,7 @@ definitions:
timestamp: timestamp:
type: string type: string
type: object type: object
models.LoginRequest: api-service_internal_models.MetaResponse:
properties:
password:
type: string
username:
type: string
required:
- password
- username
type: object
models.MetaResponse:
properties: properties:
current_page: current_page:
type: integer type: integer
@@ -65,37 +55,51 @@ definitions:
total_pages: total_pages:
type: integer type: integer
type: object type: object
models.NullableInt32: api-service_internal_models.NullableInt32:
properties: properties:
int32: int32:
type: integer type: integer
valid: valid:
type: boolean type: boolean
type: object type: object
models.NullableString: api-service_internal_models.NullableString:
properties: properties:
string: string:
type: string type: string
valid: valid:
type: boolean type: boolean
type: object type: object
models.NullableTime: api-service_internal_models.NullableTime:
properties: properties:
time: time:
type: string type: string
valid: valid:
type: boolean type: boolean
type: object type: object
models.TokenResponse: api-service_internal_models_auth.LoginRequest:
properties:
password:
type: string
username:
type: string
required:
- password
- username
type: object
api-service_internal_models_auth.TokenResponse:
properties: properties:
access_token: access_token:
type: string type: string
expires_in: expires_in:
description: Durasi dalam detik
type: integer type: integer
refresh_token:
type: string
token_type: token_type:
description: Biasanya "Bearer"
type: string type: string
type: object type: object
models.User: api-service_internal_models_auth.User:
properties: properties:
email: email:
type: string type: string
@@ -106,52 +110,52 @@ definitions:
username: username:
type: string type: string
type: object type: object
retribusi.Retribusi: api-service_internal_models_retribusi.Retribusi:
properties: properties:
date_created: date_created:
$ref: '#/definitions/models.NullableTime' $ref: '#/definitions/api-service_internal_models.NullableTime'
date_updated: date_updated:
$ref: '#/definitions/models.NullableTime' $ref: '#/definitions/api-service_internal_models.NullableTime'
dinas: dinas:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
id: id:
type: string type: string
jenis: jenis:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
kelompok_obyek: kelompok_obyek:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
kode_tarif: kode_tarif:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
pelayanan: pelayanan:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
rekening_denda: rekening_denda:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
rekening_pokok: rekening_pokok:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
satuan: satuan:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
satuan_overtime: satuan_overtime:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
sort: sort:
$ref: '#/definitions/models.NullableInt32' $ref: '#/definitions/api-service_internal_models.NullableInt32'
status: status:
type: string type: string
tarif: tarif:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
tarif_overtime: tarif_overtime:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
uraian_1: uraian_1:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
uraian_2: uraian_2:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
uraian_3: uraian_3:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
user_created: user_created:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
user_updated: user_updated:
$ref: '#/definitions/models.NullableString' $ref: '#/definitions/api-service_internal_models.NullableString'
type: object type: object
retribusi.RetribusiCreateRequest: api-service_internal_models_retribusi.RetribusiCreateRequest:
properties: properties:
dinas: dinas:
maxLength: 255 maxLength: 255
@@ -208,41 +212,41 @@ definitions:
required: required:
- status - status
type: object type: object
retribusi.RetribusiCreateResponse: api-service_internal_models_retribusi.RetribusiCreateResponse:
properties: properties:
data: data:
$ref: '#/definitions/retribusi.Retribusi' $ref: '#/definitions/api-service_internal_models_retribusi.Retribusi'
message: message:
type: string type: string
type: object type: object
retribusi.RetribusiDeleteResponse: api-service_internal_models_retribusi.RetribusiDeleteResponse:
properties: properties:
id: id:
type: string type: string
message: message:
type: string type: string
type: object type: object
retribusi.RetribusiGetByIDResponse: api-service_internal_models_retribusi.RetribusiGetByIDResponse:
properties: properties:
data: data:
$ref: '#/definitions/retribusi.Retribusi' $ref: '#/definitions/api-service_internal_models_retribusi.Retribusi'
message: message:
type: string type: string
type: object type: object
retribusi.RetribusiGetResponse: api-service_internal_models_retribusi.RetribusiGetResponse:
properties: properties:
data: data:
items: items:
$ref: '#/definitions/retribusi.Retribusi' $ref: '#/definitions/api-service_internal_models_retribusi.Retribusi'
type: array type: array
message: message:
type: string type: string
meta: meta:
$ref: '#/definitions/models.MetaResponse' $ref: '#/definitions/api-service_internal_models.MetaResponse'
summary: summary:
$ref: '#/definitions/models.AggregateData' $ref: '#/definitions/api-service_internal_models.AggregateData'
type: object type: object
retribusi.RetribusiUpdateRequest: api-service_internal_models_retribusi.RetribusiUpdateRequest:
properties: properties:
dinas: dinas:
maxLength: 255 maxLength: 255
@@ -299,10 +303,10 @@ definitions:
required: required:
- status - status
type: object type: object
retribusi.RetribusiUpdateResponse: api-service_internal_models_retribusi.RetribusiUpdateResponse:
properties: properties:
data: data:
$ref: '#/definitions/retribusi.Retribusi' $ref: '#/definitions/api-service_internal_models_retribusi.Retribusi'
message: message:
type: string type: string
type: object type: object
@@ -331,14 +335,14 @@ paths:
name: login name: login
required: true required: true
schema: schema:
$ref: '#/definitions/models.LoginRequest' $ref: '#/definitions/api-service_internal_models_auth.LoginRequest'
produces: produces:
- application/json - application/json
responses: responses:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.TokenResponse' $ref: '#/definitions/api-service_internal_models_auth.TokenResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
@@ -363,7 +367,7 @@ paths:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.User' $ref: '#/definitions/api-service_internal_models_auth.User'
"401": "401":
description: Unauthorized description: Unauthorized
schema: schema:
@@ -395,7 +399,7 @@ paths:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.TokenResponse' $ref: '#/definitions/api-service_internal_models_auth.TokenResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
@@ -460,19 +464,19 @@ paths:
"200": "200":
description: Retribusi deleted successfully description: Retribusi deleted successfully
schema: schema:
$ref: '#/definitions/retribusi.RetribusiDeleteResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiDeleteResponse'
"400": "400":
description: Invalid ID format description: Invalid ID format
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"404": "404":
description: Retribusi not found description: Retribusi not found
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Delete retribusi summary: Delete retribusi
tags: tags:
- Retribusi - Retribusi
@@ -492,19 +496,19 @@ paths:
"200": "200":
description: Success response description: Success response
schema: schema:
$ref: '#/definitions/retribusi.RetribusiGetByIDResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiGetByIDResponse'
"400": "400":
description: Invalid ID format description: Invalid ID format
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"404": "404":
description: Retribusi not found description: Retribusi not found
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Get Retribusi by ID summary: Get Retribusi by ID
tags: tags:
- Retribusi - Retribusi
@@ -523,26 +527,26 @@ paths:
name: request name: request
required: true required: true
schema: schema:
$ref: '#/definitions/retribusi.RetribusiUpdateRequest' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiUpdateRequest'
produces: produces:
- application/json - application/json
responses: responses:
"200": "200":
description: Retribusi updated successfully description: Retribusi updated successfully
schema: schema:
$ref: '#/definitions/retribusi.RetribusiUpdateResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiUpdateResponse'
"400": "400":
description: Bad request or validation error description: Bad request or validation error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"404": "404":
description: Retribusi not found description: Retribusi not found
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Update retribusi summary: Update retribusi
tags: tags:
- Retribusi - Retribusi
@@ -589,15 +593,15 @@ paths:
"200": "200":
description: Success response description: Success response
schema: schema:
$ref: '#/definitions/retribusi.RetribusiGetResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Get retribusi with pagination and optional aggregation summary: Get retribusi with pagination and optional aggregation
tags: tags:
- Retribusi - Retribusi
@@ -611,22 +615,22 @@ paths:
name: request name: request
required: true required: true
schema: schema:
$ref: '#/definitions/retribusi.RetribusiCreateRequest' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiCreateRequest'
produces: produces:
- application/json - application/json
responses: responses:
"201": "201":
description: Retribusi created successfully description: Retribusi created successfully
schema: schema:
$ref: '#/definitions/retribusi.RetribusiCreateResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiCreateResponse'
"400": "400":
description: Bad request or validation error description: Bad request or validation error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Create retribusi summary: Create retribusi
tags: tags:
- Retribusi - Retribusi
@@ -664,15 +668,15 @@ paths:
"200": "200":
description: Success response description: Success response
schema: schema:
$ref: '#/definitions/retribusi.RetribusiGetResponse' $ref: '#/definitions/api-service_internal_models_retribusi.RetribusiGetResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Get retribusi with dynamic filtering summary: Get retribusi with dynamic filtering
tags: tags:
- Retribusi - Retribusi
@@ -692,11 +696,11 @@ paths:
"200": "200":
description: Statistics data description: Statistics data
schema: schema:
$ref: '#/definitions/models.AggregateData' $ref: '#/definitions/api-service_internal_models.AggregateData'
"500": "500":
description: Internal server error description: Internal server error
schema: schema:
$ref: '#/definitions/models.ErrorResponse' $ref: '#/definitions/api-service_internal_models.ErrorResponse'
summary: Get retribusi statistics summary: Get retribusi statistics
tags: tags:
- Retribusi - Retribusi
@@ -704,33 +708,28 @@ paths:
post: post:
consumes: consumes:
- application/json - application/json
description: Generate a JWT token for a user description: Generate a JWT token for testing purposes
parameters: parameters:
- description: User credentials - description: Token generation data
in: body in: body
name: token name: token
required: true required: true
schema: schema:
$ref: '#/definitions/models.LoginRequest' additionalProperties: true
type: object
produces: produces:
- application/json - application/json
responses: responses:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.TokenResponse' $ref: '#/definitions/api-service_internal_models_auth.TokenResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
additionalProperties: additionalProperties:
type: string type: string
type: object type: object
"401":
description: Unauthorized
schema:
additionalProperties:
type: string
type: object
summary: Generate JWT token summary: Generate JWT token
tags: tags:
- Token - Token
@@ -738,16 +737,14 @@ paths:
post: post:
consumes: consumes:
- application/json - application/json
description: Generate a JWT token directly without password verification (for description: Generate a JWT token directly with provided data
testing)
parameters: parameters:
- description: User info - description: Token generation data
in: body in: body
name: user name: token
required: true required: true
schema: schema:
additionalProperties: additionalProperties: true
type: string
type: object type: object
produces: produces:
- application/json - application/json
@@ -755,14 +752,14 @@ paths:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.TokenResponse' $ref: '#/definitions/api-service_internal_models_auth.TokenResponse'
"400": "400":
description: Bad request description: Bad request
schema: schema:
additionalProperties: additionalProperties:
type: string type: string
type: object type: object
summary: Generate token directly summary: Generate JWT token directly
tags: tags:
- Token - Token
schemes: schemes:

go.mod
View File

@@ -20,7 +20,6 @@ require (
github.com/gin-contrib/cors v1.7.6 github.com/gin-contrib/cors v1.7.6
github.com/go-playground/validator/v10 v10.27.0 github.com/go-playground/validator/v10 v10.27.0
github.com/go-redis/redis_rate/v10 v10.0.1 github.com/go-redis/redis_rate/v10 v10.0.1
github.com/go-sql-driver/mysql v1.8.1
github.com/jmoiron/sqlx v1.4.0 github.com/jmoiron/sqlx v1.4.0
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/lib/pq v1.10.9 github.com/lib/pq v1.10.9
@@ -53,6 +52,7 @@ require (
github.com/go-openapi/swag v0.19.15 // indirect github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sql-driver/mysql v1.8.1 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect github.com/goccy/go-json v0.10.5 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect

View File

@@ -20,7 +20,7 @@ import (
type Config struct { type Config struct {
Server ServerConfig Server ServerConfig
Databases map[string]DatabaseConfig Databases map[string]DatabaseConfig
ReadReplicas map[string][]DatabaseConfig // For read replicas ReadReplicas map[string][]DatabaseConfig
Auth AuthConfig Auth AuthConfig
Keycloak KeycloakConfig Keycloak KeycloakConfig
Bpjs BpjsConfig Bpjs BpjsConfig
@@ -65,6 +65,20 @@ type DatabaseConfig struct {
MaxOpenConns int // Max open connections MaxOpenConns int // Max open connections
MaxIdleConns int // Max idle connections MaxIdleConns int // Max idle connections
ConnMaxLifetime time.Duration // Connection max lifetime ConnMaxLifetime time.Duration // Connection max lifetime
// Security settings
RequireSSL bool // Require SSL connection
SSLRootCert string // Path to SSL root certificate
SSLCert string // Path to SSL client certificate
SSLKey string // Path to SSL client key
Timeout time.Duration // Connection timeout
ConnectTimeout time.Duration // Connect timeout
ReadTimeout time.Duration // Read timeout
WriteTimeout time.Duration // Write timeout
StatementTimeout time.Duration // Statement timeout for PostgreSQL
// Connection pool settings
MaxLifetime time.Duration // Maximum amount of time a connection may be reused
MaxIdleTime time.Duration // Maximum amount of time a connection may be idle
HealthCheckPeriod time.Duration // Health check period
} }
type AuthConfig struct { type AuthConfig struct {
@@ -79,6 +93,7 @@ type AuthYAMLConfig struct {
StaticTokens []string `yaml:"static_tokens"` StaticTokens []string `yaml:"static_tokens"`
FallbackTo string `yaml:"fallback_to"` FallbackTo string `yaml:"fallback_to"`
} }
type KeycloakYAMLConfig struct { type KeycloakYAMLConfig struct {
Issuer string `yaml:"issuer"` Issuer string `yaml:"issuer"`
Audience string `yaml:"audience"` Audience string `yaml:"audience"`
@@ -121,6 +136,10 @@ type SecurityConfig struct {
RateLimit RateLimitConfig `mapstructure:"rate_limit"` RateLimit RateLimitConfig `mapstructure:"rate_limit"`
// Input Validation // Input Validation
MaxInputLength int `mapstructure:"max_input_length"` MaxInputLength int `mapstructure:"max_input_length"`
// SQL Injection Protection
SanitizeQueries bool `mapstructure:"sanitize_queries"`
// Connection Security
RequireSecureConnections bool `mapstructure:"require_secure_connections"`
} }
// RateLimitConfig contains the settings for the rate limiter // RateLimitConfig contains the settings for the rate limiter
@@ -229,6 +248,8 @@ func LoadConfig() *Config {
DB: getEnvAsInt("REDIS_DB", 0), DB: getEnvAsInt("REDIS_DB", 0),
}, },
}, },
SanitizeQueries: getEnvAsBool("SECURITY_SANITIZE_QUERIES", true),
RequireSecureConnections: getEnvAsBool("SECURITY_REQUIRE_SECURE_CONNECTIONS", false),
}, },
} }
log.Printf("DEBUG: Final Config Object. MaxInputLength is: %d", config.Security.MaxInputLength) log.Printf("DEBUG: Final Config Object. MaxInputLength is: %d", config.Security.MaxInputLength)
@@ -372,32 +393,60 @@ func loadKeycloakConfig() KeycloakConfig {
} }
func (c *Config) loadDatabaseConfigs() { func (c *Config) loadDatabaseConfigs() {
// Simplified approach: Directly load from environment variables // Load PostgreSQL configurations
// This ensures we get the exact values specified in .env
// // Primary database configuration
// c.Databases["default"] = DatabaseConfig{
// Name: "default",
// Type: getEnv("DB_CONNECTION", "postgres"),
// Host: getEnv("DB_HOST", "localhost"),
// Port: getEnvAsInt("DB_PORT", 5432),
// Username: getEnv("DB_USERNAME", ""),
// Password: getEnv("DB_PASSWORD", ""),
// Database: getEnv("DB_DATABASE", "satu_db"),
// Schema: getEnv("DB_SCHEMA", "public"),
// SSLMode: getEnv("DB_SSLMODE", "disable"),
// MaxOpenConns: getEnvAsInt("DB_MAX_OPEN_CONNS", 25),
// MaxIdleConns: getEnvAsInt("DB_MAX_IDLE_CONNS", 25),
// ConnMaxLifetime: parseDuration(getEnv("DB_CONN_MAX_LIFETIME", "5m")),
// }
// SATUDATA database configuration
c.addPostgreSQLConfigs() c.addPostgreSQLConfigs()
// MongoDB database configuration // Load MySQL configurations
c.addMySQLConfigs()
// Load MongoDB configurations
c.addMongoDBConfigs() c.addMongoDBConfigs()
// Legacy support for backward compatibility // Load SQLite configurations
c.addSQLiteConfigs()
// Load custom database configurations from environment variables
c.loadCustomDatabaseConfigs()
// Remove duplicate database configurations
c.removeDuplicateDatabases()
}
func (c *Config) removeDuplicateDatabases() {
// Create a map to track unique database connections
uniqueDBs := make(map[string]DatabaseConfig)
duplicates := make(map[string][]string)
// First pass: identify duplicates
for name, config := range c.Databases {
// Create a unique key based on connection parameters
key := fmt.Sprintf("%s:%s:%d:%s", config.Type, config.Host, config.Port, config.Database)
if existing, exists := uniqueDBs[key]; exists {
// Found a duplicate
if duplicates[key] == nil {
duplicates[key] = []string{existing.Name}
}
duplicates[key] = append(duplicates[key], name)
log.Printf("⚠️ Database %s is a duplicate of %s (same connection parameters)", name, existing.Name)
} else {
uniqueDBs[key] = config
}
}
// Second pass: remove duplicates, keeping the first one
for _, dupNames := range duplicates {
// Keep the first database name, remove the rest
keepName := dupNames[0]
for i := 1; i < len(dupNames); i++ {
removeName := dupNames[i]
delete(c.Databases, removeName)
log.Printf("🗑️ Removed duplicate database configuration: %s (kept: %s)", removeName, keepName)
}
}
}
func (c *Config) loadCustomDatabaseConfigs() {
envVars := os.Environ() envVars := os.Environ()
dbConfigs := make(map[string]map[string]string) dbConfigs := make(map[string]map[string]string)
@@ -437,28 +486,45 @@ func (c *Config) loadDatabaseConfigs() {
continue continue
} }
dbConfig := DatabaseConfig{ dbType := getEnvFromMap(config, "connection", getEnvFromMap(config, "type", "postgres"))
Name: name,
Type: getEnvFromMap(config, "connection", getEnvFromMap(config, "type", "postgres")),
Host: getEnvFromMap(config, "host", "localhost"),
Port: getEnvAsIntFromMap(config, "port", 5432),
Username: getEnvFromMap(config, "username", ""),
Password: getEnvFromMap(config, "password", ""),
Database: getEnvFromMap(config, "database", getEnvFromMap(config, "name", name)),
Schema: getEnvFromMap(config, "schema", "public"),
SSLMode: getEnvFromMap(config, "sslmode", "disable"),
Path: getEnvFromMap(config, "path", ""),
Options: getEnvFromMap(config, "options", ""),
MaxOpenConns: getEnvAsIntFromMap(config, "max_open_conns", 25),
MaxIdleConns: getEnvAsIntFromMap(config, "max_idle_conns", 25),
ConnMaxLifetime: parseDuration(getEnvFromMap(config, "conn_max_lifetime", "5m")),
}
// Skip if username is empty and it's not a system config // Skip if username is empty and it's not a system config
if dbConfig.Username == "" && !strings.HasPrefix(name, "chrome") { username := getEnvFromMap(config, "username", "")
if username == "" && !strings.HasPrefix(name, "chrome") {
continue continue
} }
dbConfig := DatabaseConfig{
Name: name,
Type: dbType,
Host: getEnvFromMap(config, "host", "localhost"),
Port: getEnvAsIntFromMap(config, "port", getDefaultPort(dbType)),
Username: username,
Password: getEnvFromMap(config, "password", ""),
Database: getEnvFromMap(config, "database", getEnvFromMap(config, "name", name)),
Schema: getEnvFromMap(config, "schema", getDefaultSchema(dbType)),
SSLMode: getEnvFromMap(config, "sslmode", getDefaultSSLMode(dbType)),
Path: getEnvFromMap(config, "path", ""),
Options: getEnvFromMap(config, "options", ""),
MaxOpenConns: getEnvAsIntFromMap(config, "max_open_conns", getDefaultMaxOpenConns(dbType)),
MaxIdleConns: getEnvAsIntFromMap(config, "max_idle_conns", getDefaultMaxIdleConns(dbType)),
ConnMaxLifetime: parseDuration(getEnvFromMap(config, "conn_max_lifetime", getDefaultConnMaxLifetime(dbType))),
// Security settings
RequireSSL: getEnvAsBoolFromMap(config, "require_ssl", false),
SSLRootCert: getEnvFromMap(config, "ssl_root_cert", ""),
SSLCert: getEnvFromMap(config, "ssl_cert", ""),
SSLKey: getEnvFromMap(config, "ssl_key", ""),
Timeout: parseDuration(getEnvFromMap(config, "timeout", "30s")),
ConnectTimeout: parseDuration(getEnvFromMap(config, "connect_timeout", "10s")),
ReadTimeout: parseDuration(getEnvFromMap(config, "read_timeout", "30s")),
WriteTimeout: parseDuration(getEnvFromMap(config, "write_timeout", "30s")),
StatementTimeout: parseDuration(getEnvFromMap(config, "statement_timeout", "120s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnvFromMap(config, "max_lifetime", "1h")),
MaxIdleTime: parseDuration(getEnvFromMap(config, "max_idle_time", "5m")),
HealthCheckPeriod: parseDuration(getEnvFromMap(config, "health_check_period", "1m")),
}
c.Databases[name] = dbConfig c.Databases[name] = dbConfig
} }
} }
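A minimal, self-contained sketch of the duplicate-detection key that removeDuplicateDatabases builds above; the connection values are made up for illustration:

package main

import "fmt"

type dbConfig struct {
    Name, Type, Host, Database string
    Port                       int
}

// key mirrors the dedup key used above: type:host:port:database.
func key(c dbConfig) string {
    return fmt.Sprintf("%s:%s:%d:%s", c.Type, c.Host, c.Port, c.Database)
}

func main() {
    a := dbConfig{Name: "satudata", Type: "postgres", Host: "10.0.0.5", Port: 5432, Database: "satu_db"}
    b := dbConfig{Name: "reporting", Type: "postgres", Host: "10.0.0.5", Port: 5432, Database: "satu_db"}
    // Identical connection parameters produce the same key, so the second entry would be dropped.
    fmt.Println(key(a) == key(b)) // true
}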
@@ -499,20 +565,41 @@ func (c *Config) loadReadReplicaConfigs() {
} }
if replicaConfig == nil { if replicaConfig == nil {
// Create new replica config // Get primary DB config as base
primaryDB, exists := c.Databases[dbName]
if !exists {
log.Printf("Warning: Primary database %s not found for replica configuration", dbName)
continue
}
// Create new replica config based on primary
newConfig := DatabaseConfig{ newConfig := DatabaseConfig{
Name: replicaKey, Name: replicaKey,
Type: c.Databases[dbName].Type, Type: primaryDB.Type,
Host: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_HOST", c.Databases[dbName].Host), Host: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_HOST", primaryDB.Host),
Port: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PORT", c.Databases[dbName].Port), Port: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PORT", primaryDB.Port),
Username: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_USERNAME", c.Databases[dbName].Username), Username: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_USERNAME", primaryDB.Username),
Password: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PASSWORD", c.Databases[dbName].Password), Password: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_PASSWORD", primaryDB.Password),
Database: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_DATABASE", c.Databases[dbName].Database), Database: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_DATABASE", primaryDB.Database),
Schema: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SCHEMA", c.Databases[dbName].Schema), Schema: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SCHEMA", primaryDB.Schema),
SSLMode: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSLMODE", c.Databases[dbName].SSLMode), SSLMode: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSLMODE", primaryDB.SSLMode),
MaxOpenConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_OPEN_CONNS", c.Databases[dbName].MaxOpenConns), MaxOpenConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_OPEN_CONNS", primaryDB.MaxOpenConns),
MaxIdleConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_IDLE_CONNS", c.Databases[dbName].MaxIdleConns), MaxIdleConns: getEnvAsInt("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_IDLE_CONNS", primaryDB.MaxIdleConns),
ConnMaxLifetime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_CONN_MAX_LIFETIME", "5m")), ConnMaxLifetime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_CONN_MAX_LIFETIME", primaryDB.ConnMaxLifetime.String())),
// Security settings
RequireSSL: getEnvAsBool("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_REQUIRE_SSL", primaryDB.RequireSSL),
SSLRootCert: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSL_ROOT_CERT", primaryDB.SSLRootCert),
SSLCert: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSL_CERT", primaryDB.SSLCert),
SSLKey: getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_SSL_KEY", primaryDB.SSLKey),
Timeout: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_TIMEOUT", primaryDB.Timeout.String())),
ConnectTimeout: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_CONNECT_TIMEOUT", primaryDB.ConnectTimeout.String())),
ReadTimeout: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_READ_TIMEOUT", primaryDB.ReadTimeout.String())),
WriteTimeout: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_WRITE_TIMEOUT", primaryDB.WriteTimeout.String())),
StatementTimeout: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_STATEMENT_TIMEOUT", primaryDB.StatementTimeout.String())),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_LIFETIME", primaryDB.MaxLifetime.String())),
MaxIdleTime: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_MAX_IDLE_TIME", primaryDB.MaxIdleTime.String())),
HealthCheckPeriod: parseDuration(getEnv("DB_"+strings.ToUpper(dbName)+"_REPLICA_"+replicaIndex+"_HEALTH_CHECK_PERIOD", primaryDB.HealthCheckPeriod.String())),
} }
c.ReadReplicas[dbName] = append(c.ReadReplicas[dbName], newConfig) c.ReadReplicas[dbName] = append(c.ReadReplicas[dbName], newConfig)
replicaConfig = &c.ReadReplicas[dbName][len(c.ReadReplicas[dbName])-1] replicaConfig = &c.ReadReplicas[dbName][len(c.ReadReplicas[dbName])-1]
@@ -540,6 +627,30 @@ func (c *Config) loadReadReplicaConfigs() {
replicaConfig.MaxIdleConns = getEnvAsInt(key, 25) replicaConfig.MaxIdleConns = getEnvAsInt(key, 25)
case "conn_max_lifetime": case "conn_max_lifetime":
replicaConfig.ConnMaxLifetime = parseDuration(value) replicaConfig.ConnMaxLifetime = parseDuration(value)
case "require_ssl":
replicaConfig.RequireSSL = getEnvAsBool(key, false)
case "ssl_root_cert":
replicaConfig.SSLRootCert = value
case "ssl_cert":
replicaConfig.SSLCert = value
case "ssl_key":
replicaConfig.SSLKey = value
case "timeout":
replicaConfig.Timeout = parseDuration(value)
case "connect_timeout":
replicaConfig.ConnectTimeout = parseDuration(value)
case "read_timeout":
replicaConfig.ReadTimeout = parseDuration(value)
case "write_timeout":
replicaConfig.WriteTimeout = parseDuration(value)
case "statement_timeout":
replicaConfig.StatementTimeout = parseDuration(value)
case "max_lifetime":
replicaConfig.MaxLifetime = parseDuration(value)
case "max_idle_time":
replicaConfig.MaxIdleTime = parseDuration(value)
case "health_check_period":
replicaConfig.HealthCheckPeriod = parseDuration(value)
} }
} }
} }
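For reference, a small sketch of the environment-key shape the replica loader reads; any property that is not set explicitly falls back to the primary database's value. The "satudata" name and the index are illustrative assumptions:

package main

import (
    "fmt"
    "strings"
)

// replicaKey reproduces the naming convention read above: DB_<NAME>_REPLICA_<INDEX>_<PROPERTY>.
func replicaKey(dbName, index, property string) string {
    return "DB_" + strings.ToUpper(dbName) + "_REPLICA_" + index + "_" + strings.ToUpper(property)
}

func main() {
    fmt.Println(replicaKey("satudata", "1", "host"))              // DB_SATUDATA_REPLICA_1_HOST
    fmt.Println(replicaKey("satudata", "1", "statement_timeout")) // DB_SATUDATA_REPLICA_1_STATEMENT_TIMEOUT
}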
@@ -554,15 +665,29 @@ func (c *Config) addSpecificDatabase(prefix, defaultType string) {
Name: prefix, Name: prefix,
Type: connection, Type: connection,
Host: host, Host: host,
Port: getEnvAsInt(strings.ToUpper(prefix)+"_PORT", 5432), Port: getEnvAsInt(strings.ToUpper(prefix)+"_PORT", getDefaultPort(connection)),
Username: getEnv(strings.ToUpper(prefix)+"_USERNAME", ""), Username: getEnv(strings.ToUpper(prefix)+"_USERNAME", ""),
Password: getEnv(strings.ToUpper(prefix)+"_PASSWORD", ""), Password: getEnv(strings.ToUpper(prefix)+"_PASSWORD", ""),
Database: getEnv(strings.ToUpper(prefix)+"_DATABASE", getEnv(strings.ToUpper(prefix)+"_NAME", prefix)), Database: getEnv(strings.ToUpper(prefix)+"_DATABASE", getEnv(strings.ToUpper(prefix)+"_NAME", prefix)),
Schema: getEnv(strings.ToUpper(prefix)+"_SCHEMA", "public"), Schema: getEnv(strings.ToUpper(prefix)+"_SCHEMA", getDefaultSchema(connection)),
SSLMode: getEnv(strings.ToUpper(prefix)+"_SSLMODE", "disable"), SSLMode: getEnv(strings.ToUpper(prefix)+"_SSLMODE", getDefaultSSLMode(connection)),
MaxOpenConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_OPEN_CONNS", 25), MaxOpenConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_OPEN_CONNS", getDefaultMaxOpenConns(connection)),
MaxIdleConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_IDLE_CONNS", 25), MaxIdleConns: getEnvAsInt(strings.ToUpper(prefix)+"_MAX_IDLE_CONNS", getDefaultMaxIdleConns(connection)),
ConnMaxLifetime: parseDuration(getEnv(strings.ToUpper(prefix)+"_CONN_MAX_LIFETIME", "5m")), ConnMaxLifetime: parseDuration(getEnv(strings.ToUpper(prefix)+"_CONN_MAX_LIFETIME", getDefaultConnMaxLifetime(connection))),
// Security settings
RequireSSL: getEnvAsBool(strings.ToUpper(prefix)+"_REQUIRE_SSL", false),
SSLRootCert: getEnv(strings.ToUpper(prefix)+"_SSL_ROOT_CERT", ""),
SSLCert: getEnv(strings.ToUpper(prefix)+"_SSL_CERT", ""),
SSLKey: getEnv(strings.ToUpper(prefix)+"_SSL_KEY", ""),
Timeout: parseDuration(getEnv(strings.ToUpper(prefix)+"_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv(strings.ToUpper(prefix)+"_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv(strings.ToUpper(prefix)+"_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv(strings.ToUpper(prefix)+"_WRITE_TIMEOUT", "30s")),
StatementTimeout: parseDuration(getEnv(strings.ToUpper(prefix)+"_STATEMENT_TIMEOUT", "120s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv(strings.ToUpper(prefix)+"_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv(strings.ToUpper(prefix)+"_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv(strings.ToUpper(prefix)+"_HEALTH_CHECK_PERIOD", "1m")),
} }
c.Databases[prefix] = dbConfig c.Databases[prefix] = dbConfig
} }
@@ -570,25 +695,6 @@ func (c *Config) addSpecificDatabase(prefix, defaultType string) {
// PostgreSQL database // PostgreSQL database
func (c *Config) addPostgreSQLConfigs() { func (c *Config) addPostgreSQLConfigs() {
// SATUDATA database configuration
// defaultPOSTGRESHost := getEnv("POSTGRES_HOST", "localhost")
// if defaultPOSTGRESHost != "" {
// c.Databases["postgres"] = DatabaseConfig{
// Name: "postgres",
// Type: getEnv("POSTGRES_CONNECTION", "postgres"),
// Host: defaultPOSTGRESHost,
// Port: getEnvAsInt("POSTGRES_PORT", 5432),
// Username: getEnv("POSTGRES_USERNAME", ""),
// Password: getEnv("POSTGRES_PASSWORD", ""),
// Database: getEnv("POSTGRES_DATABASE", "postgres"),
// Schema: getEnv("POSTGRES_SCHEMA", "public"),
// SSLMode: getEnv("POSTGRES_SSLMODE", "disable"),
// MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25),
// MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25),
// ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")),
// }
// }
// Support for custom PostgreSQL configurations with POSTGRES_ prefix // Support for custom PostgreSQL configurations with POSTGRES_ prefix
envVars := os.Environ() envVars := os.Environ()
for _, envVar := range envVars { for _, envVar := range envVars {
@@ -624,6 +730,20 @@ func (c *Config) addPostgreSQLConfigs() {
MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25), MaxOpenConns: getEnvAsInt("POSTGRES_MAX_OPEN_CONNS", 25),
MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25), MaxIdleConns: getEnvAsInt("POSTGRES_MAX_IDLE_CONNS", 25),
ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")), ConnMaxLifetime: parseDuration(getEnv("POSTGRES_CONN_MAX_LIFETIME", "5m")),
// Security settings
RequireSSL: getEnvAsBool("POSTGRES_"+strings.ToUpper(dbName)+"_REQUIRE_SSL", false),
SSLRootCert: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SSL_ROOT_CERT", ""),
SSLCert: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SSL_CERT", ""),
SSLKey: getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_SSL_KEY", ""),
Timeout: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_WRITE_TIMEOUT", "30s")),
StatementTimeout: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_STATEMENT_TIMEOUT", "120s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("POSTGRES_"+strings.ToUpper(dbName)+"_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
} }
@@ -648,6 +768,19 @@ func (c *Config) addMySQLConfigs() {
MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25),
MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25),
ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")),
// Security settings
RequireSSL: getEnvAsBool("MYSQL_REQUIRE_SSL", false),
SSLRootCert: getEnv("MYSQL_SSL_ROOT_CERT", ""),
SSLCert: getEnv("MYSQL_SSL_CERT", ""),
SSLKey: getEnv("MYSQL_SSL_KEY", ""),
Timeout: parseDuration(getEnv("MYSQL_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("MYSQL_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("MYSQL_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("MYSQL_WRITE_TIMEOUT", "30s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("MYSQL_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("MYSQL_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("MYSQL_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
@@ -687,6 +820,19 @@ func (c *Config) addMySQLConfigs() {
MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25), MaxOpenConns: getEnvAsInt("MYSQL_MAX_OPEN_CONNS", 25),
MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25), MaxIdleConns: getEnvAsInt("MYSQL_MAX_IDLE_CONNS", 25),
ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")), ConnMaxLifetime: parseDuration(getEnv("MYSQL_CONN_MAX_LIFETIME", "5m")),
// Security settings
RequireSSL: getEnvAsBool("MYSQL_"+strings.ToUpper(dbName)+"_REQUIRE_SSL", false),
SSLRootCert: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_SSL_ROOT_CERT", ""),
SSLCert: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_SSL_CERT", ""),
SSLKey: getEnv("MYSQL_"+strings.ToUpper(dbName)+"_SSL_KEY", ""),
Timeout: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_WRITE_TIMEOUT", "30s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("MYSQL_"+strings.ToUpper(dbName)+"_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
} }
@@ -712,6 +858,19 @@ func (c *Config) addMongoDBConfigs() {
MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100),
MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10),
ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")),
// Security settings
RequireSSL: getEnvAsBool("MONGODB_REQUIRE_SSL", false),
SSLRootCert: getEnv("MONGODB_SSL_ROOT_CERT", ""),
SSLCert: getEnv("MONGODB_SSL_CERT", ""),
SSLKey: getEnv("MONGODB_SSL_KEY", ""),
Timeout: parseDuration(getEnv("MONGODB_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("MONGODB_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("MONGODB_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("MONGODB_WRITE_TIMEOUT", "30s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("MONGODB_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("MONGODB_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("MONGODB_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
@@ -730,6 +889,19 @@ func (c *Config) addMongoDBConfigs() {
MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100),
MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10),
ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")),
// Security settings
RequireSSL: getEnvAsBool("MONGODB_LOCAL_REQUIRE_SSL", false),
SSLRootCert: getEnv("MONGODB_LOCAL_SSL_ROOT_CERT", ""),
SSLCert: getEnv("MONGODB_LOCAL_SSL_CERT", ""),
SSLKey: getEnv("MONGODB_LOCAL_SSL_KEY", ""),
Timeout: parseDuration(getEnv("MONGODB_LOCAL_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("MONGODB_LOCAL_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("MONGODB_LOCAL_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("MONGODB_LOCAL_WRITE_TIMEOUT", "30s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("MONGODB_LOCAL_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("MONGODB_LOCAL_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("MONGODB_LOCAL_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
@@ -766,6 +938,19 @@ func (c *Config) addMongoDBConfigs() {
MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100), MaxOpenConns: getEnvAsInt("MONGODB_MAX_OPEN_CONNS", 100),
MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10), MaxIdleConns: getEnvAsInt("MONGODB_MAX_IDLE_CONNS", 10),
ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")), ConnMaxLifetime: parseDuration(getEnv("MONGODB_CONN_MAX_LIFETIME", "30m")),
// Security settings
RequireSSL: getEnvAsBool("MONGODB_"+strings.ToUpper(dbName)+"_REQUIRE_SSL", false),
SSLRootCert: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_SSL_ROOT_CERT", ""),
SSLCert: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_SSL_CERT", ""),
SSLKey: getEnv("MONGODB_"+strings.ToUpper(dbName)+"_SSL_KEY", ""),
Timeout: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_TIMEOUT", "30s")),
ConnectTimeout: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_CONNECT_TIMEOUT", "10s")),
ReadTimeout: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_READ_TIMEOUT", "30s")),
WriteTimeout: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_WRITE_TIMEOUT", "30s")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("MONGODB_"+strings.ToUpper(dbName)+"_HEALTH_CHECK_PERIOD", "1m")),
} }
} }
} }
@@ -773,6 +958,155 @@ func (c *Config) addMongoDBConfigs() {
} }
} }
// addSQLiteConfigs adds SQLite database configurations from environment variables
func (c *Config) addSQLiteConfigs() {
// Support for custom SQLite configurations with SQLITE_ prefix
envVars := os.Environ()
for _, envVar := range envVars {
parts := strings.SplitN(envVar, "=", 2)
if len(parts) != 2 {
continue
}
key := parts[0]
// Parse SQLite configurations (format: SQLITE_[NAME]_[PROPERTY])
if strings.HasPrefix(key, "SQLITE_") && strings.Contains(key, "_") {
segments := strings.Split(key, "_")
if len(segments) >= 3 {
dbName := strings.ToLower(strings.Join(segments[1:len(segments)-1], "_"))
// Skip if it's a standard SQLite configuration
if dbName == "connection" || dbName == "dev" || dbName == "default" {
continue
}
// Create or update SQLite configuration
if _, exists := c.Databases[dbName]; !exists {
sqlitePath := getEnv("SQLITE_"+strings.ToUpper(dbName)+"_PATH", "")
if sqlitePath != "" {
c.Databases[dbName] = DatabaseConfig{
Name: dbName,
Type: "sqlite",
Path: sqlitePath,
Database: getEnv("SQLITE_"+strings.ToUpper(dbName)+"_DATABASE", dbName),
MaxOpenConns: getEnvAsInt("SQLITE_MAX_OPEN_CONNS", 25),
MaxIdleConns: getEnvAsInt("SQLITE_MAX_IDLE_CONNS", 25),
ConnMaxLifetime: parseDuration(getEnv("SQLITE_CONN_MAX_LIFETIME", "5m")),
// Connection pool settings
MaxLifetime: parseDuration(getEnv("SQLITE_"+strings.ToUpper(dbName)+"_MAX_LIFETIME", "1h")),
MaxIdleTime: parseDuration(getEnv("SQLITE_"+strings.ToUpper(dbName)+"_MAX_IDLE_TIME", "5m")),
HealthCheckPeriod: parseDuration(getEnv("SQLITE_"+strings.ToUpper(dbName)+"_HEALTH_CHECK_PERIOD", "1m")),
}
}
}
}
}
}
}
// Helper functions for getting default values based on database type
func getDefaultPort(dbType string) int {
switch dbType {
case "postgres":
return 5432
case "mysql":
return 3306
case "sqlserver":
return 1433
case "mongodb":
return 27017
case "sqlite":
return 0 // SQLite doesn't use port
default:
return 5432
}
}
func getDefaultSchema(dbType string) string {
switch dbType {
case "postgres":
return "public"
case "mysql":
return ""
case "sqlserver":
return "dbo"
case "mongodb":
return ""
case "sqlite":
return ""
default:
return "public"
}
}
func getDefaultSSLMode(dbType string) string {
switch dbType {
case "postgres":
return "disable"
case "mysql":
return "false"
case "sqlserver":
return "false"
case "mongodb":
return "false"
case "sqlite":
return ""
default:
return "disable"
}
}
func getDefaultMaxOpenConns(dbType string) int {
switch dbType {
case "postgres":
return 25
case "mysql":
return 25
case "sqlserver":
return 25
case "mongodb":
return 100
case "sqlite":
return 1 // SQLite only supports one writer at a time
default:
return 25
}
}
func getDefaultMaxIdleConns(dbType string) int {
switch dbType {
case "postgres":
return 25
case "mysql":
return 25
case "sqlserver":
return 25
case "mongodb":
return 10
case "sqlite":
return 1 // SQLite only supports one writer at a time
default:
return 25
}
}
func getDefaultConnMaxLifetime(dbType string) string {
switch dbType {
case "postgres":
return "5m"
case "mysql":
return "5m"
case "sqlserver":
return "5m"
case "mongodb":
return "30m"
case "sqlite":
return "5m"
default:
return "5m"
}
}
func getEnvFromMap(config map[string]string, key, defaultValue string) string { func getEnvFromMap(config map[string]string, key, defaultValue string) string {
if value, exists := config[key]; exists { if value, exists := config[key]; exists {
return value return value
@@ -789,6 +1123,15 @@ func getEnvAsIntFromMap(config map[string]string, key string, defaultValue int)
return defaultValue return defaultValue
} }
func getEnvAsBoolFromMap(config map[string]string, key string, defaultValue bool) bool {
if value, exists := config[key]; exists {
if boolValue, err := strconv.ParseBool(value); err == nil {
return boolValue
}
}
return defaultValue
}
func parseDuration(durationStr string) time.Duration { func parseDuration(durationStr string) time.Duration {
if duration, err := time.ParseDuration(durationStr); err == nil { if duration, err := time.ParseDuration(durationStr); err == nil {
return duration return duration
@@ -869,16 +1212,19 @@ func (c *Config) Validate() error {
} }
for name, db := range c.Databases { for name, db := range c.Databases {
if db.Host == "" { if db.Type != "sqlite" && db.Host == "" {
errs = append(errs, fmt.Sprintf("database host is required for %s", name)) errs = append(errs, fmt.Sprintf("database host is required for %s", name))
} }
if db.Username == "" { if db.Type != "sqlite" && db.Username == "" {
errs = append(errs, fmt.Sprintf("database username is required for %s", name)) errs = append(errs, fmt.Sprintf("database username is required for %s", name))
} }
if db.Password == "" { if db.Type != "sqlite" && db.Password == "" {
errs = append(errs, fmt.Sprintf("database password is required for %s", name)) errs = append(errs, fmt.Sprintf("database password is required for %s", name))
} }
if db.Database == "" { if db.Type == "sqlite" && db.Path == "" {
errs = append(errs, fmt.Sprintf("database path is required for SQLite database %s", name))
}
if db.Type != "sqlite" && db.Database == "" {
errs = append(errs, fmt.Sprintf("database name is required for %s", name)) errs = append(errs, fmt.Sprintf("database name is required for %s", name))
} }
} }

View File

@@ -2,24 +2,22 @@ package database
import ( import (
"context" "context"
"crypto/tls"
"database/sql" "database/sql"
"fmt" "fmt"
"log" // Import runtime package "log"
// Import debug package
"strconv" "strconv"
"sync" "sync"
"time" "time"
"api-service/internal/config" "api-service/internal/config"
_ "github.com/jackc/pgx/v5" // Import pgx driver _ "github.com/jackc/pgx/v5"
"github.com/jmoiron/sqlx"
"github.com/lib/pq" "github.com/lib/pq"
_ "gorm.io/driver/postgres" // Import GORM PostgreSQL driver _ "gorm.io/driver/mysql"
_ "gorm.io/driver/postgres"
_ "github.com/go-sql-driver/mysql" // MySQL driver for database/sql _ "gorm.io/driver/sqlserver"
_ "gorm.io/driver/mysql" // GORM MySQL driver
_ "gorm.io/driver/sqlserver" // GORM SQL Server driver
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/options"
@@ -40,27 +38,31 @@ const (
type Service interface { type Service interface {
Health() map[string]map[string]string Health() map[string]map[string]string
GetDB(name string) (*sql.DB, error) GetDB(name string) (*sql.DB, error)
GetSQLXDB(name string) (*sqlx.DB, error) // Add this method
GetMongoClient(name string) (*mongo.Client, error) GetMongoClient(name string) (*mongo.Client, error)
GetReadDB(name string) (*sql.DB, error) // For read replicas GetReadDB(name string) (*sql.DB, error)
Close() error Close() error
ListDBs() []string ListDBs() []string
GetDBType(name string) (DatabaseType, error) GetDBType(name string) (DatabaseType, error)
// Add methods for WebSocket notifications
ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error
NotifyChange(dbName, channel, payload string) error NotifyChange(dbName, channel, payload string) error
GetPrimaryDB(name string) (*sql.DB, error) // Helper to get the primary DB GetPrimaryDB(name string) (*sql.DB, error)
ExecuteQuery(ctx context.Context, dbName string, query string, args ...interface{}) (*sql.Rows, error)
ExecuteQueryRow(ctx context.Context, dbName string, query string, args ...interface{}) *sql.Row
Exec(ctx context.Context, dbName string, query string, args ...interface{}) (sql.Result, error)
} }
type service struct { type service struct {
sqlDatabases map[string]*sql.DB sqlDatabases map[string]*sql.DB
mongoClients map[string]*mongo.Client sqlxDatabases map[string]*sqlx.DB // Add a map for sqlx.DB
readReplicas map[string][]*sql.DB // Read replicas for load balancing mongoClients map[string]*mongo.Client
configs map[string]config.DatabaseConfig readReplicas map[string][]*sql.DB
readConfigs map[string][]config.DatabaseConfig configs map[string]config.DatabaseConfig
mu sync.RWMutex readConfigs map[string][]config.DatabaseConfig
readBalancer map[string]int // Round-robin counter for read replicas mu sync.RWMutex
listeners map[string]*pq.Listener // Added to track listeners readBalancer map[string]int
listenersMu sync.RWMutex listeners map[string]*pq.Listener
listenersMu sync.RWMutex
} }
var ( var (
@@ -72,18 +74,17 @@ var (
func New(cfg *config.Config) Service { func New(cfg *config.Config) Service {
once.Do(func() { once.Do(func() {
dbManager = &service{ dbManager = &service{
sqlDatabases: make(map[string]*sql.DB), sqlDatabases: make(map[string]*sql.DB),
mongoClients: make(map[string]*mongo.Client), sqlxDatabases: make(map[string]*sqlx.DB), // Initialize the sqlx map
readReplicas: make(map[string][]*sql.DB), mongoClients: make(map[string]*mongo.Client),
configs: make(map[string]config.DatabaseConfig), readReplicas: make(map[string][]*sql.DB),
readConfigs: make(map[string][]config.DatabaseConfig), configs: make(map[string]config.DatabaseConfig),
readBalancer: make(map[string]int), readConfigs: make(map[string][]config.DatabaseConfig),
listeners: make(map[string]*pq.Listener), readBalancer: make(map[string]int),
listeners: make(map[string]*pq.Listener),
} }
log.Println("Initializing database service...") // Log when the initialization starts log.Println("Initializing database service...")
// log.Printf("Current Goroutine ID: %d", runtime.NumGoroutine()) // Log the number of goroutines
// log.Printf("Stack Trace: %s", debug.Stack()) // Log the stack trace
dbManager.loadFromConfig(cfg) dbManager.loadFromConfig(cfg)
// Initialize all databases // Initialize all databases
@@ -125,14 +126,17 @@ func (s *service) addDatabase(name string, config config.DatabaseConfig) error {
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
log.Printf("=== Database Connection Debug ===") // Check for duplicate database connections
// log.Printf("Database: %s", name) for existingName, existingConfig := range s.configs {
// log.Printf("Type: %s", config.Type) if existingName != name &&
// log.Printf("Host: %s", config.Host) existingConfig.Host == config.Host &&
// log.Printf("Port: %d", config.Port) existingConfig.Port == config.Port &&
// log.Printf("Database: %s", config.Database) existingConfig.Database == config.Database &&
// log.Printf("Username: %s", config.Username) existingConfig.Type == config.Type {
// log.Printf("SSLMode: %s", config.SSLMode) log.Printf("⚠️ Database %s appears to be a duplicate of %s (same host:port:database), skipping connection", name, existingName)
return nil
}
}
var db *sql.DB var db *sql.DB
var err error var err error
@@ -156,12 +160,11 @@ func (s *service) addDatabase(name string, config config.DatabaseConfig) error {
if err != nil { if err != nil {
log.Printf("❌ Error connecting to database %s: %v", name, err) log.Printf("❌ Error connecting to database %s: %v", name, err)
log.Printf(" Database: %s@%s:%d/%s", config.Username, config.Host, config.Port, config.Database)
return err return err
} }
log.Printf("✅ Successfully connected to database: %s", name) log.Printf("✅ Successfully connected to database: %s", name)
return s.configureSQLDB(name, db, config.MaxOpenConns, config.MaxIdleConns, config.ConnMaxLifetime) return s.configureSQLDB(name, db, config)
} }
func (s *service) addReadReplica(name string, index int, config config.DatabaseConfig) error { func (s *service) addReadReplica(name string, index int, config config.DatabaseConfig) error {
@@ -206,19 +209,32 @@ func (s *service) addReadReplica(name string, index int, config config.DatabaseC
} }
func (s *service) openPostgresConnection(config config.DatabaseConfig) (*sql.DB, error) { func (s *service) openPostgresConnection(config config.DatabaseConfig) (*sql.DB, error) {
connStr := fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s", // Build connection string with security parameters
config.Username, // Convert timeout durations to seconds for pgx
config.Password, connectTimeoutSec := int(config.ConnectTimeout.Seconds())
statementTimeoutSec := int(config.StatementTimeout.Seconds())
connStr := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=%s connect_timeout=%d statement_timeout=%d",
config.Host, config.Host,
config.Port, config.Port,
config.Username,
config.Password,
config.Database, config.Database,
config.SSLMode, config.SSLMode,
connectTimeoutSec,
statementTimeoutSec,
) )
if config.Schema != "" { if config.Schema != "" {
connStr += "&search_path=" + config.Schema connStr += " search_path=" + config.Schema
} }
// Add SSL configuration if required
if config.RequireSSL {
connStr += " sslcert=" + config.SSLCert + " sslkey=" + config.SSLKey + " sslrootcert=" + config.SSLRootCert
}
// Open connection using standard database/sql interface
db, err := sql.Open("pgx", connStr) db, err := sql.Open("pgx", connStr)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to open PostgreSQL connection: %w", err) return nil, fmt.Errorf("failed to open PostgreSQL connection: %w", err)
@@ -228,14 +244,33 @@ func (s *service) openPostgresConnection(config config.DatabaseConfig) (*sql.DB,
} }
func (s *service) openMySQLConnection(config config.DatabaseConfig) (*sql.DB, error) { func (s *service) openMySQLConnection(config config.DatabaseConfig) (*sql.DB, error) {
connStr := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?parseTime=true", // Build connection string with security parameters
connStr := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?parseTime=true&timeout=%s&readTimeout=%s&writeTimeout=%s",
config.Username, config.Username,
config.Password, config.Password,
config.Host, config.Host,
config.Port, config.Port,
config.Database, config.Database,
config.Timeout,
config.ReadTimeout,
config.WriteTimeout,
) )
// Add SSL configuration if required
if config.RequireSSL {
connStr += "&tls=true"
if config.SSLRootCert != "" {
connStr += "&ssl-ca=" + config.SSLRootCert
}
if config.SSLCert != "" {
connStr += "&ssl-cert=" + config.SSLCert
}
if config.SSLKey != "" {
connStr += "&ssl-key=" + config.SSLKey
}
}
// Open connection
db, err := sql.Open("mysql", connStr) db, err := sql.Open("mysql", connStr)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to open MySQL connection: %w", err) return nil, fmt.Errorf("failed to open MySQL connection: %w", err)
@@ -245,14 +280,30 @@ func (s *service) openMySQLConnection(config config.DatabaseConfig) (*sql.DB, er
} }
func (s *service) openSQLServerConnection(config config.DatabaseConfig) (*sql.DB, error) { func (s *service) openSQLServerConnection(config config.DatabaseConfig) (*sql.DB, error) {
connStr := fmt.Sprintf("sqlserver://%s:%s@%s:%d?database=%s", // Build connection string with security parameters
// Convert timeout to seconds for SQL Server
connectTimeoutSec := int(config.ConnectTimeout.Seconds())
connStr := fmt.Sprintf("sqlserver://%s:%s@%s:%d?database=%s&connection timeout=%d",
config.Username, config.Username,
config.Password, config.Password,
config.Host, config.Host,
config.Port, config.Port,
config.Database, config.Database,
connectTimeoutSec,
) )
// Add SSL configuration if required
if config.RequireSSL {
connStr += "&encrypt=true"
if config.SSLRootCert != "" {
connStr += "&trustServerCertificate=false"
} else {
connStr += "&trustServerCertificate=true"
}
}
// Open connection
db, err := sql.Open("sqlserver", connStr) db, err := sql.Open("sqlserver", connStr)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to open SQL Server connection: %w", err) return nil, fmt.Errorf("failed to open SQL Server connection: %w", err)
@@ -262,23 +313,26 @@ func (s *service) openSQLServerConnection(config config.DatabaseConfig) (*sql.DB
} }
func (s *service) openSQLiteConnection(config config.DatabaseConfig) (*sql.DB, error) { func (s *service) openSQLiteConnection(config config.DatabaseConfig) (*sql.DB, error) {
dbPath := config.Path // Open connection
if dbPath == "" { db, err := sql.Open("sqlite3", config.Path)
dbPath = fmt.Sprintf("./data/%s.db", config.Database)
}
db, err := sql.Open("sqlite3", dbPath)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to open SQLite connection: %w", err) return nil, fmt.Errorf("failed to open SQLite connection: %w", err)
} }
// Enable foreign key constraints and WAL mode for better security and performance
_, err = db.Exec("PRAGMA foreign_keys = ON; PRAGMA journal_mode = WAL;")
if err != nil {
return nil, fmt.Errorf("failed to configure SQLite: %w", err)
}
return db, nil return db, nil
} }
func (s *service) addMongoDB(name string, config config.DatabaseConfig) error { func (s *service) addMongoDB(name string, config config.DatabaseConfig) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) ctx, cancel := context.WithTimeout(context.Background(), config.Timeout)
defer cancel() defer cancel()
// Build MongoDB URI with authentication and TLS options
uri := fmt.Sprintf("mongodb://%s:%s@%s:%d/%s", uri := fmt.Sprintf("mongodb://%s:%s@%s:%d/%s",
config.Username, config.Username,
config.Password, config.Password,
@@ -287,23 +341,45 @@ func (s *service) addMongoDB(name string, config config.DatabaseConfig) error {
config.Database, config.Database,
) )
client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri)) // Configure client options with security settings
clientOptions := options.Client().ApplyURI(uri)
// Set TLS configuration if needed
if config.RequireSSL {
clientOptions.SetTLSConfig(&tls.Config{
InsecureSkipVerify: config.SSLMode == "require",
MinVersion: tls.VersionTLS12,
})
}
// Set connection timeout
clientOptions.SetConnectTimeout(config.ConnectTimeout)
clientOptions.SetServerSelectionTimeout(config.Timeout)
client, err := mongo.Connect(ctx, clientOptions)
if err != nil { if err != nil {
return fmt.Errorf("failed to connect to MongoDB: %w", err) return fmt.Errorf("failed to connect to MongoDB: %w", err)
} }
// Ping to verify connection
if err := client.Ping(ctx, nil); err != nil {
return fmt.Errorf("failed to ping MongoDB: %w", err)
}
s.mongoClients[name] = client s.mongoClients[name] = client
log.Printf("Successfully connected to MongoDB: %s", name) log.Printf("Successfully connected to MongoDB: %s", name)
return nil return nil
} }
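A hedged sketch of consuming a Mongo connection registered through addMongoDB; the "mongohl7" connection name, the "master" database and the "messages" collection are assumptions for illustration only:

package main

import (
    "context"
    "time"

    "api-service/internal/database"

    "go.mongodb.org/mongo-driver/bson"
)

func countMessages(svc database.Service) (int64, error) {
    client, err := svc.GetMongoClient("mongohl7") // hypothetical connection name
    if err != nil {
        return 0, err
    }
    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()
    // Server-side count; bson.M{} means "no filter".
    return client.Database("master").Collection("messages").CountDocuments(ctx, bson.M{})
}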
func (s *service) configureSQLDB(name string, db *sql.DB, maxOpenConns, maxIdleConns int, connMaxLifetime time.Duration) error { func (s *service) configureSQLDB(name string, db *sql.DB, config config.DatabaseConfig) error {
db.SetMaxOpenConns(maxOpenConns) // Set connection pool limits
db.SetMaxIdleConns(maxIdleConns) db.SetMaxOpenConns(config.MaxOpenConns)
db.SetConnMaxLifetime(connMaxLifetime) db.SetMaxIdleConns(config.MaxIdleConns)
db.SetConnMaxLifetime(config.ConnMaxLifetime)
db.SetConnMaxIdleTime(config.MaxIdleTime)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) ctx, cancel := context.WithTimeout(context.Background(), config.Timeout)
defer cancel() defer cancel()
if err := db.PingContext(ctx); err != nil { if err := db.PingContext(ctx); err != nil {
@@ -312,6 +388,28 @@ func (s *service) configureSQLDB(name string, db *sql.DB, maxOpenConns, maxIdleC
} }
s.sqlDatabases[name] = db s.sqlDatabases[name] = db
// CHANGE: build an sqlx.DB from the existing sql.DB
dbType := DatabaseType(config.Type)
var driverName string
switch dbType {
case Postgres:
driverName = "pgx"
case MySQL:
driverName = "mysql"
case SQLServer:
driverName = "sqlserver"
case SQLite:
driverName = "sqlite3"
default:
return fmt.Errorf("unsupported database type for sqlx: %s", config.Type)
}
// Create an sqlx.DB from the existing sql.DB
sqlxDB := sqlx.NewDb(db, driverName)
s.sqlxDatabases[name] = sqlxDB
log.Printf("Successfully connected to SQL database: %s", name) log.Printf("Successfully connected to SQL database: %s", name)
return nil return nil
@@ -439,26 +537,27 @@ func (s *service) Health() map[string]map[string]string {
// GetDB returns a specific SQL database connection by name // GetDB returns a specific SQL database connection by name
func (s *service) GetDB(name string) (*sql.DB, error) { func (s *service) GetDB(name string) (*sql.DB, error) {
log.Printf("Attempting to get database connection for: %s", name)
s.mu.RLock() s.mu.RLock()
defer s.mu.RUnlock() defer s.mu.RUnlock()
db, exists := s.sqlDatabases[name] db, exists := s.sqlDatabases[name]
if !exists { if !exists {
log.Printf("Error: database %s not found", name) // Log the error
return nil, fmt.Errorf("database %s not found", name) return nil, fmt.Errorf("database %s not found", name)
} }
log.Printf("Current connection pool state for %s: Open: %d, In Use: %d, Idle: %d", return db, nil
name, db.Stats().OpenConnections, db.Stats().InUse, db.Stats().Idle) }
// CHANGE: add the GetSQLXDB method
// GetSQLXDB returns a specific SQLX database connection by name
func (s *service) GetSQLXDB(name string) (*sqlx.DB, error) {
s.mu.RLock() s.mu.RLock()
defer s.mu.RUnlock() defer s.mu.RUnlock()
// db, exists := s.sqlDatabases[name] db, exists := s.sqlxDatabases[name]
// if !exists { if !exists {
// log.Printf("Error: database %s not found", name) // Log the error return nil, fmt.Errorf("database %s not found", name)
// return nil, fmt.Errorf("database %s not found", name) }
// }
return db, nil return db, nil
} }
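A short, hedged sketch of how the new GetSQLXDB accessor might be consumed; the "satudata" connection name, the users table and its columns are illustrative assumptions:

package main

import "api-service/internal/database"

type userRow struct {
    ID    int    `db:"id"`
    Name  string `db:"name"`
    Email string `db:"email"`
}

func loadActiveUsers(svc database.Service) ([]userRow, error) {
    sqlxDB, err := svc.GetSQLXDB("satudata") // hypothetical connection name
    if err != nil {
        return nil, err
    }
    var users []userRow
    // sqlx maps result columns onto struct fields via the db tags.
    err = sqlxDB.Select(&users, "SELECT id, name, email FROM users WHERE status = $1", "active")
    return users, err
}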
@@ -537,6 +636,13 @@ func (s *service) Close() error {
var errs []error var errs []error
// Close listeners first
for name, listener := range s.listeners {
if err := listener.Close(); err != nil {
errs = append(errs, fmt.Errorf("failed to close listener for %s: %w", name, err))
}
}
for name, db := range s.sqlDatabases { for name, db := range s.sqlDatabases {
if err := db.Close(); err != nil { if err := db.Close(); err != nil {
errs = append(errs, fmt.Errorf("failed to close database %s: %w", name, err)) errs = append(errs, fmt.Errorf("failed to close database %s: %w", name, err))
@@ -566,10 +672,12 @@ func (s *service) Close() error {
} }
s.sqlDatabases = make(map[string]*sql.DB) s.sqlDatabases = make(map[string]*sql.DB)
s.sqlxDatabases = make(map[string]*sqlx.DB) // Reset the sqlx map
s.mongoClients = make(map[string]*mongo.Client) s.mongoClients = make(map[string]*mongo.Client)
s.readReplicas = make(map[string][]*sql.DB) s.readReplicas = make(map[string][]*sql.DB)
s.configs = make(map[string]config.DatabaseConfig) s.configs = make(map[string]config.DatabaseConfig)
s.readConfigs = make(map[string][]config.DatabaseConfig) s.readConfigs = make(map[string][]config.DatabaseConfig)
s.listeners = make(map[string]*pq.Listener)
if len(errs) > 0 { if len(errs) > 0 {
return fmt.Errorf("errors closing databases: %v", errs) return fmt.Errorf("errors closing databases: %v", errs)
@@ -583,6 +691,51 @@ func (s *service) GetPrimaryDB(name string) (*sql.DB, error) {
return s.GetDB(name) return s.GetDB(name)
} }
// ExecuteQuery executes a query with parameters and returns rows
func (s *service) ExecuteQuery(ctx context.Context, dbName string, query string, args ...interface{}) (*sql.Rows, error) {
db, err := s.GetDB(dbName)
if err != nil {
return nil, fmt.Errorf("failed to get database %s: %w", dbName, err)
}
// Use parameterized queries to prevent SQL injection
rows, err := db.QueryContext(ctx, query, args...)
if err != nil {
return nil, fmt.Errorf("failed to execute query: %w", err)
}
return rows, nil
}
// ExecuteQueryRow executes a query with parameters and returns a single row
func (s *service) ExecuteQueryRow(ctx context.Context, dbName string, query string, args ...interface{}) *sql.Row {
db, err := s.GetDB(dbName)
if err != nil {
// *sql.Row cannot carry a custom error, so return an empty row; prefer ExecuteQuery when the lookup error must be handled
row := &sql.Row{}
return row
}
// Use parameterized queries to prevent SQL injection
return db.QueryRowContext(ctx, query, args...)
}
// Exec executes a query with parameters and returns the result
func (s *service) Exec(ctx context.Context, dbName string, query string, args ...interface{}) (sql.Result, error) {
db, err := s.GetDB(dbName)
if err != nil {
return nil, fmt.Errorf("failed to get database %s: %w", dbName, err)
}
// Use parameterized queries to prevent SQL injection
result, err := db.ExecContext(ctx, query, args...)
if err != nil {
return nil, fmt.Errorf("failed to execute query: %w", err)
}
return result, nil
}
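A minimal usage sketch for these helpers; the "satudata" connection name and the users table are assumptions, and the value always travels as a bind parameter rather than being concatenated into the SQL:

package main

import (
    "context"

    "api-service/internal/database"
)

func activeUserNames(ctx context.Context, svc database.Service) ([]string, error) {
    rows, err := svc.ExecuteQuery(ctx, "satudata",
        "SELECT name FROM users WHERE status = $1", "active") // hypothetical table and connection name
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var names []string
    for rows.Next() {
        var name string
        if err := rows.Scan(&name); err != nil {
            return nil, err
        }
        names = append(names, name)
    }
    return names, rows.Err()
}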
// ListenForChanges implements PostgreSQL LISTEN/NOTIFY for real-time updates // ListenForChanges implements PostgreSQL LISTEN/NOTIFY for real-time updates
func (s *service) ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error { func (s *service) ListenForChanges(ctx context.Context, dbName string, channels []string, callback func(string, string)) error {
s.mu.RLock() s.mu.RLock()
@@ -599,13 +752,17 @@ func (s *service) ListenForChanges(ctx context.Context, dbName string, channels
} }
// Create connection string for listener // Create connection string for listener
connStr := fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s", // Convert timeout to seconds for pq
connectTimeoutSec := int(config.ConnectTimeout.Seconds())
connStr := fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s&connect_timeout=%d",
config.Username, config.Username,
config.Password, config.Password,
config.Host, config.Host,
config.Port, config.Port,
config.Database, config.Database,
config.SSLMode, config.SSLMode,
connectTimeoutSec,
) )
// Create listener // Create listener
@@ -687,7 +844,7 @@ func (s *service) NotifyChange(dbName, channel, payload string) error {
return fmt.Errorf("NOTIFY only supported for PostgreSQL databases") return fmt.Errorf("NOTIFY only supported for PostgreSQL databases")
} }
// Execute NOTIFY // Execute NOTIFY with parameterized query to prevent SQL injection
query := "SELECT pg_notify($1, $2)" query := "SELECT pg_notify($1, $2)"
_, err = db.Exec(query, channel, payload) _, err = db.Exec(query, channel, payload)
if err != nil { if err != nil {

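A hedged sketch of wiring ListenForChanges and NotifyChange together; the "satudata" connection name and the "retribusi_changes" channel are illustrative assumptions:

package main

import (
    "context"
    "log"

    "api-service/internal/database"
)

func watchRetribusi(ctx context.Context, svc database.Service) error {
    // Subscribe to notifications pushed by pg_notify on the listed channel.
    err := svc.ListenForChanges(ctx, "satudata", []string{"retribusi_changes"},
        func(channel, payload string) {
            log.Printf("change on %s: %s", channel, payload)
        })
    if err != nil {
        return err
    }
    // Publish a change; channel and payload are passed as bind parameters to pg_notify($1, $2).
    return svc.NotifyChange("satudata", "retribusi_changes", `{"id": 1, "op": "update"}`)
}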
View File

File diff suppressed because it is too large

View File

@@ -472,9 +472,9 @@ func (qb *QueryBuilder) BuildQuery(query DynamicQuery) (string, []interface{}, e
finalSQL := strings.Join(queryParts, " ")
-// Security check for dangerous patterns
// Security check for dangerous patterns in user input values
if qb.enableSecurityChecks {
-if err := qb.checkForSqlInjection(finalSQL); err != nil {
if err := qb.checkForSqlInjectionInArgs(allArgs); err != nil {
return "", nil, err
}
}
@@ -1327,16 +1327,20 @@ func (qb *QueryBuilder) escapeIdentifier(col string) string {
}
}
-// checkForSqlInjection checks for potential SQL injection patterns
// checkForSqlInjectionInArgs checks for potential SQL injection patterns in query arguments
-func (qb *QueryBuilder) checkForSqlInjection(sql string) error {
func (qb *QueryBuilder) checkForSqlInjectionInArgs(args []interface{}) error {
if !qb.enableSecurityChecks {
return nil
}
-lowerSQL := strings.ToLower(sql)
-for _, pattern := range qb.dangerousPatterns {
-if pattern.MatchString(lowerSQL) {
-return fmt.Errorf("potential SQL injection detected: pattern %s matched", pattern.String())
for _, arg := range args {
if str, ok := arg.(string); ok {
lowerStr := strings.ToLower(str)
for _, pattern := range qb.dangerousPatterns {
if pattern.MatchString(lowerStr) {
return fmt.Errorf("potential SQL injection detected in query argument: pattern %s matched", pattern.String())
}
}
}
}
return nil
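For reference, a self-contained sketch of the same argument-scanning idea follows. The regular expressions here are illustrative stand-ins, not the query builder's actual dangerousPatterns list, and checkArgs is a hypothetical name.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Illustrative patterns only; the real list lives in the query builder.
var dangerousPatterns = []*regexp.Regexp{
	regexp.MustCompile(`;\s*(drop|truncate|alter)\s`),
	regexp.MustCompile(`union\s+select`),
	regexp.MustCompile(`--`),
}

// checkArgs mirrors the approach above: only string arguments are scanned,
// since numeric and time values cannot carry SQL fragments.
func checkArgs(args []interface{}) error {
	for _, arg := range args {
		str, ok := arg.(string)
		if !ok {
			continue
		}
		lower := strings.ToLower(str)
		for _, p := range dangerousPatterns {
			if p.MatchString(lower) {
				return fmt.Errorf("potential SQL injection detected in query argument: pattern %s matched", p.String())
			}
		}
	}
	return nil
}

func main() {
	fmt.Println(checkArgs([]interface{}{"alice", 42}))                // <nil>
	fmt.Println(checkArgs([]interface{}{"x'; drop table users; --"})) // error
}

Scanning the bound arguments rather than the final SQL avoids false positives on the builder's own identifiers while still flagging suspicious user input.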

View File

@@ -0,0 +1,943 @@
package main
import (
"context"
"fmt"
"log"
"net/url"
"time"
"api-service/internal/config"
"api-service/internal/database"
"api-service/internal/utils/query"
"api-service/internal/validation"
"github.com/jmoiron/sqlx"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
// This file provides comprehensive examples of using the query builder library
// for performing various database operations including CRUD, transactions, joins, etc.
// Each example function demonstrates how to build queries, print them, and execute them.
// =============================================================================
// DEFINISI MODEL (CONTOH)
// =============================================================================
// User adalah contoh struct untuk tabel 'users'.
type User struct {
ID int `db:"id" bson:"_id,omitempty"`
Name string `db:"name" bson:"name"`
Email string `db:"email" bson:"email"`
Status string `db:"status" bson:"status"`
CreatedAt time.Time `db:"created_at" bson:"created_at"`
}
// Post adalah contoh struct untuk tabel 'posts'.
type Post struct {
ID int `db:"id" bson:"_id,omitempty"`
UserID int `db:"user_id" bson:"user_id"`
Title string `db:"title" bson:"title"`
Content string `db:"content" bson:"content"`
CreatedAt time.Time `db:"created_at" bson:"created_at"`
}
// Employee adalah contoh struct untuk tabel 'employees' dengan kolom JSON.
type Employee struct {
ID int `db:"id" bson:"_id,omitempty"`
Name string `db:"name" bson:"name"`
Department string `db:"department" bson:"department"`
Salary float64 `db:"salary" bson:"salary"`
Metadata map[string]interface{} `db:"metadata" bson:"metadata"` // Kolom JSON/JSONB
}
// =============================================================================
// FUNGSI UTAMA
// =============================================================================
func main() {
cfg := setupConfig()
dbService := database.New(cfg)
fmt.Println("============================================================")
fmt.Println(" CONTOH 1: QUERY DASAR (SELECT, INSERT, UPDATE, DELETE)")
fmt.Println("============================================================")
basicCRUDExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 2: TRANSAKSI SQL (POSTGRESQL)")
fmt.Println("============================================================")
sqlTransactionExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 3: TRANSAKSI MONGODB")
fmt.Println("============================================================")
mongoTransactionExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 4: QUERY DENGAN FILTER DAN PAGINASI")
fmt.Println("============================================================")
filterAndPaginationExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 5: QUERY DENGAN JOIN")
fmt.Println("============================================================")
joinExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 6: QUERY DENGAN CTE (COMMON TABLE EXPRESSION)")
fmt.Println("============================================================")
cteExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 7: QUERY DENGAN WINDOW FUNCTION")
fmt.Println("============================================================")
windowFunctionExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 8: VALIDASI DATA DINAMIS")
fmt.Println("============================================================")
validationExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 9: OPERASI JSON")
fmt.Println("============================================================")
jsonQueryExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 10: QUERY MONGODB (CRUD & AGGREGATION)")
fmt.Println("============================================================")
mongodbExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 11: PENGGUNAAN READ REPLICA")
fmt.Println("============================================================")
readReplicaExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 12: HEALTH CHECK DATABASE")
fmt.Println("============================================================")
healthCheckExample(dbService)
fmt.Println("\n============================================================")
fmt.Println(" CONTOH 13: PARSING QUERY DARI URL")
fmt.Println("============================================================")
urlQueryParsingExample(dbService)
}
func setupConfig() *config.Config {
return &config.Config{
Databases: map[string]config.DatabaseConfig{
"main": {
Type: "postgres",
Host: "localhost",
Port: 5432,
Username: "user",
Password: "password",
Database: "company_db",
SSLMode: "disable",
MaxOpenConns: 25,
MaxIdleConns: 5,
ConnMaxLifetime: time.Hour,
},
"mongodb": {
Type: "mongodb",
Host: "localhost",
Port: 27017,
Database: "company_db",
Username: "user",
Password: "password",
},
},
}
}
// =============================================================================
// CONTOH 1: QUERY DASAR (CRUD)
// =============================================================================
// basicCRUDExample demonstrates basic Create, Read, Update, Delete operations using the query builder.
// It shows how to build SQL queries, print them, and execute them while displaying results.
// Expected output: Prints INSERT SQL and result (new ID), SELECT SQL and user data, UPDATE SQL and affected rows, DELETE SQL and affected rows.
// Example raw queries:
// INSERT: INSERT INTO users (name, email, status) VALUES ($1, $2, $3) RETURNING id
// SELECT: SELECT * FROM users WHERE id = $1
// UPDATE: UPDATE users SET status = $1 WHERE id = $2
// DELETE: DELETE FROM users WHERE id = $1
func basicCRUDExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
// --- INSERT ---
fmt.Println("\n--- Operasi INSERT ---")
insertData := query.InsertData{
Columns: []string{"name", "email", "status"},
Values: []interface{}{"Alice", "alice@example.com", "active"},
}
sql, args, err := qb.BuildInsertQuery("users", insertData, "id")
if err != nil {
log.Printf("Error building INSERT: %v", err)
return
}
fmt.Printf("Generated INSERT SQL: %s\nArgs: %v\n", sql, args)
result, err := qb.ExecuteInsert(ctx, db, "users", insertData, "id")
if err != nil {
log.Printf("Error INSERT: %v", err)
return
}
newID, _ := result.LastInsertId()
fmt.Printf("-> INSERT: Berhasil menambah user dengan ID: %d\n", newID)
// --- SELECT (Single Row) ---
fmt.Println("\n--- Operasi SELECT ---")
var user User
selectQuery := query.DynamicQuery{
Fields: []query.SelectField{{Expression: "*"}},
From: "users",
Filters: []query.FilterGroup{{
Filters: []query.DynamicFilter{{Column: "id", Operator: query.OpEqual, Value: newID}},
}},
}
sql, args, err = qb.BuildQuery(selectQuery)
if err != nil {
log.Printf("Error building SELECT: %v", err)
return
}
fmt.Printf("Generated SELECT SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQueryRow(ctx, db, selectQuery, &user)
if err != nil {
log.Printf("Error SELECT single row: %v", err)
return
}
fmt.Printf("-> SELECT (Single Row): Berhasil mengambil user: %+v\n", user)
// --- UPDATE ---
fmt.Println("\n--- Operasi UPDATE ---")
updateData := query.UpdateData{
Columns: []string{"status"},
Values: []interface{}{"inactive"},
}
updateFilter := []query.FilterGroup{{
Filters: []query.DynamicFilter{{Column: "id", Operator: query.OpEqual, Value: newID}},
}}
sql, args, err = qb.BuildUpdateQuery("users", updateData, updateFilter)
if err != nil {
log.Printf("Error building UPDATE: %v", err)
return
}
fmt.Printf("Generated UPDATE SQL: %s\nArgs: %v\n", sql, args)
_, err = qb.ExecuteUpdate(ctx, db, "users", updateData, updateFilter)
if err != nil {
log.Printf("Error UPDATE: %v", err)
return
}
fmt.Printf("-> UPDATE: Berhasil memperbarui status user dengan ID: %d\n", newID)
// --- DELETE ---
fmt.Println("\n--- Operasi DELETE ---")
deleteFilter := []query.FilterGroup{{
Filters: []query.DynamicFilter{{Column: "id", Operator: query.OpEqual, Value: newID}},
}}
sql, args, err = qb.BuildDeleteQuery("users", deleteFilter)
if err != nil {
log.Printf("Error building DELETE: %v", err)
return
}
fmt.Printf("Generated DELETE SQL: %s\nArgs: %v\n", sql, args)
_, err = qb.ExecuteDelete(ctx, db, "users", deleteFilter)
if err != nil {
log.Printf("Error DELETE: %v", err)
return
}
fmt.Printf("-> DELETE: Berhasil menghapus user dengan ID: %d\n", newID)
}
// =============================================================================
// CONTOH 2: TRANSAKSI SQL (POSTGRESQL)
// =============================================================================
// sqlTransactionExample demonstrates how to perform atomic transactions involving updates
// across multiple tables using the Query Builder. It builds and prints SQL queries before execution.
// Expected output: Prints UPDATE SQL for salaries and employees, transaction commit/rollback status, and validation results.
// Example raw queries:
// UPDATE salaries: UPDATE salaries SET salary = $1 WHERE employee_id = $2
// UPDATE employees: UPDATE employees SET last_name = $1 WHERE employee_id = $2
func sqlTransactionExample(dbService database.Service) {
ctx := context.Background()
employeeID := 123
newSalary := 75000
newLastName := "Doe"
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Fatalf("Gagal mendapatkan koneksi database SQL: %v", err)
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
tx, err := db.BeginTxx(ctx, nil)
if err != nil {
log.Fatalf("Gagal memulai transaksi SQL: %v", err)
}
defer func() {
if p := recover(); p != nil {
fmt.Println("Terjadi panic, melakukan rollback transaksi...")
_ = tx.Rollback()
panic(p)
} else if err != nil {
fmt.Printf("Transaksi dibatalkan (ROLLBACK) karena error: %v\n", err)
_ = tx.Rollback()
} else {
fmt.Println("Tidak ada error, melakukan COMMIT transaksi...")
err = tx.Commit()
if err != nil {
log.Printf("Gagal melakukan COMMIT transaksi: %v", err)
}
}
}()
fmt.Printf("Memulai transaksi untuk employee_id: %d\n", employeeID)
// --- Operasi 1: Update gaji di tabel 'salaries' ---
fmt.Println("\n--- Operasi 1: UPDATE salaries ---")
salariesUpdateData := query.UpdateData{
Columns: []string{"salary"},
Values: []interface{}{newSalary},
}
salariesFilter := []query.FilterGroup{
{
Filters: []query.DynamicFilter{
{Column: "employee_id", Operator: query.OpEqual, Value: employeeID},
},
},
}
sql, args, err := qb.BuildUpdateQuery("salaries", salariesUpdateData, salariesFilter)
if err != nil {
log.Printf("Error building UPDATE salaries: %v", err)
return
}
fmt.Printf("Generated UPDATE salaries SQL: %s\nArgs: %v\n", sql, args)
salariesResult, err := qb.ExecuteUpdate(ctx, tx, "salaries", salariesUpdateData, salariesFilter)
if err != nil {
return
}
salariesRowsAffected, _ := salariesResult.RowsAffected()
fmt.Printf("-> UPDATE salaries: %d baris terpengaruh.\n", salariesRowsAffected)
// --- Operasi 2: Update informasi di tabel 'employees' ---
fmt.Println("\n--- Operasi 2: UPDATE employees ---")
employeesUpdateData := query.UpdateData{
Columns: []string{"last_name"},
Values: []interface{}{newLastName},
}
employeesFilter := []query.FilterGroup{
{
Filters: []query.DynamicFilter{
{Column: "employee_id", Operator: query.OpEqual, Value: employeeID},
},
},
}
sql, args, err = qb.BuildUpdateQuery("employees", employeesUpdateData, employeesFilter)
if err != nil {
log.Printf("Error building UPDATE employees: %v", err)
return
}
fmt.Printf("Generated UPDATE employees SQL: %s\nArgs: %v\n", sql, args)
employeesResult, err := qb.ExecuteUpdate(ctx, tx, "employees", employeesUpdateData, employeesFilter)
if err != nil {
return
}
employeesRowsAffected, _ := employeesResult.RowsAffected()
fmt.Printf("-> UPDATE employees: %d baris terpengaruh.\n", employeesRowsAffected)
// --- Validasi Akhir Transaksi ---
if salariesRowsAffected == 1 && employeesRowsAffected == 1 {
fmt.Println("-> Validasi BERHASIL: Kedua tabel berhasil diperbarui.")
} else {
err = fmt.Errorf("validasi GAGAL: diharapkan 1 baris terupdate di setiap tabel, tetapi mendapat %d (salaries) dan %d (employees)", salariesRowsAffected, employeesRowsAffected)
return
}
}
// =============================================================================
// CONTOH 3: TRANSAKSI MONGODB
// =============================================================================
// mongoTransactionExample demonstrates MongoDB transactions using the query builder.
// It prints the filters and update operations before executing them in a transaction.
// Expected output: Prints MongoDB filters and update operations for salaries and employees, transaction commit/abort status, and validation results.
// Example raw queries:
// MongoDB filters: {"employee_id": 123}
// MongoDB updates: {"$set": {"salary": 75000}}, {"$set": {"last_name": "Doe"}}
func mongoTransactionExample(dbService database.Service) {
ctx := context.Background()
employeeID := 123
newSalary := 75000
newLastName := "Doe"
client, err := dbService.GetMongoClient("mongodb")
if err != nil {
log.Fatalf("Gagal mendapatkan klien MongoDB: %v", err)
}
salariesCollection := client.Database("company_db").Collection("salaries")
employeesCollection := client.Database("company_db").Collection("employees")
session, err := client.StartSession()
if err != nil {
log.Fatalf("Gagal memulai sesi MongoDB: %v", err)
}
defer session.EndSession(ctx)
fmt.Printf("Memulai transaksi MongoDB untuk employee_id: %d\n", employeeID)
_, err = session.WithTransaction(ctx, func(sessCtx mongo.SessionContext) (interface{}, error) {
// --- Operasi 1: Update gaji di koleksi 'salaries' ---
fmt.Println("\n--- Operasi 1: UPDATE salaries ---")
salariesFilter := bson.M{"employee_id": employeeID}
salariesUpdate := bson.M{"$set": bson.M{"salary": newSalary}}
fmt.Printf("-> MongoDB Update Salaries Filter: %#v\n", salariesFilter)
fmt.Printf("-> MongoDB Update Salaries Operation: %#v\n", salariesUpdate)
salariesResult, err := salariesCollection.UpdateOne(sessCtx, salariesFilter, salariesUpdate)
if err != nil {
return nil, fmt.Errorf("gagal update koleksi salaries: %w", err)
}
fmt.Printf("-> UPDATE salaries: %d dokumen cocok (matched).\n", salariesResult.MatchedCount)
// --- Operasi 2: Update informasi di koleksi 'employees' ---
fmt.Println("\n--- Operasi 2: UPDATE employees ---")
employeesFilter := bson.M{"employee_id": employeeID}
employeesUpdate := bson.M{"$set": bson.M{"last_name": newLastName}}
fmt.Printf("-> MongoDB Update Employees Filter: %#v\n", employeesFilter)
fmt.Printf("-> MongoDB Update Employees Operation: %#v\n", employeesUpdate)
employeesResult, err := employeesCollection.UpdateOne(sessCtx, employeesFilter, employeesUpdate)
if err != nil {
return nil, fmt.Errorf("gagal update koleksi employees: %w", err)
}
fmt.Printf("-> UPDATE employees: %d dokumen cocok (matched).\n", employeesResult.MatchedCount)
// --- Validasi Akhir Transaksi ---
if salariesResult.MatchedCount == 1 && employeesResult.MatchedCount == 1 {
fmt.Println("-> Validasi BERHASIL: Kedua koleksi berhasil diperbarui.")
return nil, nil
}
return nil, fmt.Errorf("validasi GAGAL: diharapkan 1 dokumen terupdate di setiap koleksi, tetapi mendapat %d (salaries) dan %d (employees)", salariesResult.MatchedCount, employeesResult.MatchedCount)
})
if err != nil {
fmt.Printf("Transaksi MongoDB dibatalkan (ABORT) karena error: %v\n", err)
} else {
fmt.Println("Transaksi MongoDB berhasil di-commit.")
}
}
// =============================================================================
// CONTOH 4: FILTER DAN PAGINASI
// =============================================================================
// filterAndPaginationExample demonstrates querying with filters and pagination.
// It builds and prints the SELECT query before executing it.
// Expected output: Prints SELECT SQL with filters and pagination, and the number of active users found.
// Example raw query:
// SELECT id, name FROM users WHERE (status = $1 AND created_at > $2) ORDER BY name ASC LIMIT 5 OFFSET 10
func filterAndPaginationExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
query := query.DynamicQuery{
Fields: []query.SelectField{
{Expression: "id"},
{Expression: "name"},
},
From: "users",
Filters: []query.FilterGroup{
{
LogicOp: "AND",
Filters: []query.DynamicFilter{
{Column: "status", Operator: query.OpEqual, Value: "active"},
{Column: "created_at", Operator: query.OpGreaterThan, Value: time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)},
},
},
},
Sort: []query.SortField{{Column: "name", Order: "ASC"}},
Limit: 5,
Offset: 10,
}
var users []User
sql, args, err := qb.BuildQuery(query)
if err != nil {
log.Printf("Error building SELECT: %v", err)
return
}
fmt.Printf("Generated SELECT SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQuery(ctx, db, query, &users)
if err != nil {
log.Printf("Error query dengan filter: %v", err)
return
}
fmt.Printf("-> Filter & Paginasi: Ditemukan %d user aktif (halaman 3).\n", len(users))
}
// =============================================================================
// CONTOH 5: QUERY DENGAN JOIN
// =============================================================================
// joinExample demonstrates querying with JOIN operations.
// It builds and prints the JOIN query before executing it.
// Expected output: Prints JOIN SQL query and the number of posts with author names found.
// Example raw query:
// SELECT p.id AS post_id, p.title, u.name AS author_name FROM posts p INNER JOIN users u ON p.user_id = u.id LIMIT 10
func joinExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
query := query.DynamicQuery{
Fields: []query.SelectField{
{Expression: "p.id", Alias: "post_id"},
{Expression: "p.title"},
{Expression: "u.name", Alias: "author_name"},
},
From: "posts",
Aliases: "p",
Joins: []query.Join{
{
Type: "INNER",
Table: "users",
Alias: "u",
OnConditions: query.FilterGroup{
Filters: []query.DynamicFilter{
{Column: "p.user_id", Operator: query.OpEqual, Value: "u.id"},
},
},
},
},
Limit: 10,
}
var results []struct {
PostID int `db:"post_id"`
Title string `db:"title"`
AuthorName string `db:"author_name"`
}
sql, args, err := qb.BuildQuery(query)
if err != nil {
log.Printf("Error building JOIN: %v", err)
return
}
fmt.Printf("Generated JOIN SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQuery(ctx, db, query, &results)
if err != nil {
log.Printf("Error query JOIN: %v", err)
return
}
fmt.Printf("-> JOIN: Ditemukan %d post dengan nama penulis.\n", len(results))
}
// =============================================================================
// CONTOH 6: QUERY DENGAN CTE
// =============================================================================
// cteExample demonstrates querying with Common Table Expressions (CTE).
// It builds and prints the CTE query before executing it.
// Expected output: Prints CTE SQL query and the number of users with more than 5 posts.
func cteExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
query := query.DynamicQuery{
CTEs: []query.CTE{
{
Name: "user_post_counts",
Query: query.DynamicQuery{
Fields: []query.SelectField{
{Expression: "user_id"},
{Expression: "COUNT(*)", Alias: "post_count"},
},
From: "posts",
GroupBy: []string{"user_id"},
},
},
},
Fields: []query.SelectField{
{Expression: "u.name"},
{Expression: "upc.post_count"},
},
From: "users u",
Joins: []query.Join{
{
Type: "INNER",
Table: "user_post_counts",
Alias: "upc",
OnConditions: query.FilterGroup{
Filters: []query.DynamicFilter{
{Column: "u.id", Operator: query.OpEqual, Value: "upc.user_id"},
},
},
},
},
Filters: []query.FilterGroup{
{
Filters: []query.DynamicFilter{
{Column: "upc.post_count", Operator: query.OpGreaterThan, Value: 5},
},
},
},
}
var results []struct {
Name string `db:"name"`
PostCount int `db:"post_count"`
}
sql, args, err := qb.BuildQuery(query)
if err != nil {
log.Printf("Error building CTE: %v", err)
return
}
fmt.Printf("Generated CTE SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQuery(ctx, db, query, &results)
if err != nil {
log.Printf("Error query CTE: %v", err)
return
}
fmt.Printf("-> CTE: Ditemukan %d user dengan lebih dari 5 post.\n", len(results))
}
// =============================================================================
// CONTOH 7: WINDOW FUNCTION
// =============================================================================
// windowFunctionExample demonstrates querying with window functions.
// It builds and prints the window function query before executing it.
// Expected output: Prints window function SQL query and the number of employees with salary rankings.
func windowFunctionExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
query := query.DynamicQuery{
Fields: []query.SelectField{
{Expression: "name"},
{Expression: "department"},
{Expression: "salary"},
},
From: "employees",
WindowFunctions: []query.WindowFunction{
{
Function: "RANK",
Over: "department",
OrderBy: "salary DESC",
Alias: "salary_rank",
},
},
Filters: []query.FilterGroup{
{
Filters: []query.DynamicFilter{
{Column: "department", Operator: query.OpEqual, Value: "Engineering"},
},
},
},
}
var results []struct {
Name string `db:"name"`
Department string `db:"department"`
Salary float64 `db:"salary"`
SalaryRank int `db:"salary_rank"`
}
sql, args, err := qb.BuildQuery(query)
if err != nil {
log.Printf("Error building Window Function: %v", err)
return
}
fmt.Printf("Generated Window Function SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQuery(ctx, db, query, &results)
if err != nil {
log.Printf("Error query Window Function: %v", err)
return
}
fmt.Printf("-> Window Function: Ditemukan %d employee di departemen Engineering dengan peringkat gaji.\n", len(results))
}
// =============================================================================
// CONTOH 8: VALIDASI DATA DINAMIS
// =============================================================================
// validationExample demonstrates dynamic data validation using the query builder.
// It builds and prints the validation query before executing it.
// Expected output: Prints validation SQL query and whether the email is duplicate or available.
func validationExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
validator := validation.NewDynamicValidator(qb)
userData := map[string]interface{}{"email": "test@example.com"}
emailRule := validation.NewUniqueFieldRule("users", "email")
// Build and print the validation query
countQuery := query.DynamicQuery{
From: "users",
Filters: []query.FilterGroup{{
Filters: []query.DynamicFilter{{Column: "email", Operator: query.OpEqual, Value: "test@example.com"}},
}},
}
sql, args, err := qb.BuildCountQuery(countQuery)
if err != nil {
log.Printf("Error building validation query: %v", err)
return
}
fmt.Printf("Generated Validation SQL: %s\nArgs: %v\n", sql, args)
isDuplicate, err := validator.Validate(ctx, db, emailRule, userData)
if err != nil {
log.Printf("Error validasi: %v", err)
return
}
if isDuplicate {
fmt.Println("-> Validasi: Email 'test@example.com' sudah ada.")
} else {
fmt.Println("-> Validasi: Email 'test@example.com' tersedia.")
}
}
// =============================================================================
// CONTOH 9: OPERASI JSON
// =============================================================================
// jsonQueryExample demonstrates JSON operations in queries.
// It builds and prints the JSON queries before executing them.
// Expected output: Prints JSON SELECT and UPDATE SQL queries, number of employees found, and update success message.
func jsonQueryExample(dbService database.Service) {
ctx := context.Background()
db, err := dbService.GetSQLXDB("main")
if err != nil {
log.Printf("Gagal mendapatkan koneksi DB: %v", err)
return
}
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
query := query.DynamicQuery{
Fields: []query.SelectField{{Expression: "*"}},
From: "employees",
Filters: []query.FilterGroup{{
Filters: []query.DynamicFilter{
{
Column: "metadata",
Operator: query.OpJsonEqual,
Value: "Engineering",
Options: map[string]interface{}{"path": "department"},
},
},
}},
}
var employees []Employee
sql, args, err := qb.BuildQuery(query)
if err != nil {
log.Printf("Error building JSON query: %v", err)
return
}
fmt.Printf("Generated JSON SELECT SQL: %s\nArgs: %v\n", sql, args)
err = qb.ExecuteQuery(ctx, db, query, &employees)
if err != nil {
log.Printf("Error query JSON: %v", err)
return
}
fmt.Printf("-> Operasi JSON: Ditemukan %d employee di departemen Engineering (dari metadata JSON).\n", len(employees))
updateData := query.UpdateData{
JsonUpdates: map[string]query.JsonUpdate{
"metadata": {Path: "role", Value: "Senior Developer"},
},
}
filter := []query.FilterGroup{{Filters: []query.DynamicFilter{{Column: "id", Operator: query.OpEqual, Value: 1}}}}
sql, args, err = qb.BuildUpdateQuery("employees", updateData, filter)
if err != nil {
log.Printf("Error building JSON update: %v", err)
return
}
fmt.Printf("Generated JSON UPDATE SQL: %s\nArgs: %v\n", sql, args)
_, err = qb.ExecuteUpdate(ctx, db, "employees", updateData, filter)
if err != nil {
log.Printf("Error update JSON: %v", err)
return
}
fmt.Println("-> Operasi JSON: Berhasil memperbarui 'role' di metadata untuk employee ID 1.")
}
// =============================================================================
// CONTOH 10: QUERY MONGODB
// =============================================================================
// mongodbExample demonstrates MongoDB queries using the query builder.
// It prints the built filters and pipelines before executing them.
// Expected output: Prints MongoDB find filter, number of active users, aggregation pipeline, and number of departments.
func mongodbExample(dbService database.Service) {
ctx := context.Background()
client, err := dbService.GetMongoClient("mongodb")
if err != nil {
log.Printf("Gagal mendapatkan klien MongoDB: %v", err)
return
}
collection := client.Database("company_db").Collection("users")
mqb := query.NewMongoQueryBuilder()
// --- FIND ---
fmt.Println("\n--- Operasi FIND ---")
findQuery := query.DynamicQuery{
Filters: []query.FilterGroup{{Filters: []query.DynamicFilter{{Column: "status", Operator: query.OpEqual, Value: "active"}}}},
Limit: 5,
}
filter, _, _ := mqb.BuildFindQuery(findQuery)
fmt.Printf("-> MongoDB Find Filter: %#v\n", filter)
var users []User
err = mqb.ExecuteFind(ctx, collection, findQuery, &users)
if err != nil {
log.Printf("Error MongoDB Find: %v", err)
return
}
fmt.Printf("-> MongoDB Find: Ditemukan %d user aktif.\n", len(users))
// --- AGGREGATION ---
fmt.Println("\n--- Operasi AGGREGATION ---")
aggQuery := query.DynamicQuery{
Fields: []query.SelectField{
{Expression: "department", Alias: "_id"},
{Expression: "COUNT(*)", Alias: "count"},
},
GroupBy: []string{"department"},
}
pipeline, _ := mqb.BuildAggregateQuery(aggQuery)
fmt.Printf("-> MongoDB Aggregation Pipeline: %#v\n", pipeline)
var aggResults []struct {
ID string `bson:"_id"`
Count int `bson:"count"`
}
err = mqb.ExecuteAggregate(ctx, collection, aggQuery, &aggResults)
if err != nil {
log.Printf("Error MongoDB Aggregate: %v", err)
return
}
fmt.Printf("-> MongoDB Aggregate: Ditemukan user di %d departemen.\n", len(aggResults))
}
// =============================================================================
// CONTOH 11: PENGGUNAAN READ REPLICA
// =============================================================================
// readReplicaExample demonstrates using read replicas for queries.
// It builds and prints the count query before executing it on the read replica.
// Expected output: Prints COUNT SQL query and the total number of users from the read replica.
// Example raw query:
// SELECT COUNT(*) FROM users
func readReplicaExample(dbService database.Service) {
ctx := context.Background()
readDB, err := dbService.GetReadDB("main")
if err != nil {
log.Printf("Gagal mendapatkan read replica: %v", err)
return
}
readxDB := sqlx.NewDb(readDB, "pgx")
qb := query.NewQueryBuilder(query.DBTypePostgreSQL)
countQuery := query.DynamicQuery{From: "users"}
sql, args, err := qb.BuildCountQuery(countQuery)
if err != nil {
log.Printf("Error building count query: %v", err)
return
}
fmt.Printf("Generated COUNT SQL: %s\nArgs: %v\n", sql, args)
count, err := qb.ExecuteCount(ctx, readxDB, countQuery)
if err != nil {
log.Printf("Error query di read replica: %v", err)
return
}
fmt.Printf("-> Read Replica: Total user (dari read replica): %d\n", count)
}
// =============================================================================
// CONTOH 12: HEALTH CHECK DATABASE
// =============================================================================
// healthCheckExample demonstrates database health checks.
// It prints the health status of all databases.
// Expected output: Prints health status for each database (up/down with type or error).
func healthCheckExample(dbService database.Service) {
healthStatus := dbService.Health()
fmt.Println("-> Health Check Status:")
for dbName, status := range healthStatus {
if status["status"] == "up" {
fmt.Printf(" - Database %s: SEHAT (%s)\n", dbName, status["type"])
} else {
fmt.Printf(" - Database %s: TIDAK SEHAT - %s\n", dbName, status["error"])
}
}
}
// =============================================================================
// CONTOH 13: PARSING QUERY DARI URL
// =============================================================================
// urlQueryParsingExample demonstrates parsing query parameters from URL.
// It parses the URL query and prints the resulting dynamic query structure.
// Expected output: Prints parsed fields, filters, sort, and limit from the URL query.
func urlQueryParsingExample(dbService database.Service) {
values := url.Values{}
values.Set("fields", "id,name")
values.Set("filter[status][_eq]", "active")
values.Set("filter[age][_gt]", "25")
values.Set("sort", "-name")
values.Set("limit", "10")
parser := query.NewQueryParser()
dynamicQuery, err := parser.ParseQuery(values, "users")
if err != nil {
log.Printf("Error parsing URL query: %v", err)
return
}
fmt.Println("-> Parsing URL Query:")
fmt.Printf(" Fields: %v\n", dynamicQuery.Fields)
fmt.Printf(" Filters: %+v\n", dynamicQuery.Filters)
fmt.Printf(" Sort: %+v\n", dynamicQuery.Sort)
fmt.Printf(" Limit: %d\n", dynamicQuery.Limit)
}
// =============================================================================
// AKHIR FILE
// =============================================================================

View File

@@ -63,7 +63,11 @@ func NewDynamicValidator(qb *queryUtils.QueryBuilder) *DynamicValidator {
// `data` adalah map yang berisi nilai untuk kolom yang akan diperiksa (biasanya dari request body).
// Mengembalikan `true` jika ada duplikat yang ditemukan (validasi gagal), `false` jika tidak ada duplikat (validasi berhasil).
func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule ValidationRule, data map[string]interface{}) (bool, error) {
// LOGGING: Log validation start
fmt.Printf("[VALIDATION] Starting validation for table: %s, unique columns: %v, data: %v\n", rule.TableName, rule.UniqueColumns, data)
if len(rule.UniqueColumns) == 0 {
fmt.Printf("[VALIDATION] ERROR: ValidationRule must have at least one UniqueColumn\n")
return false, fmt.Errorf("ValidationRule must have at least one UniqueColumn")
}
@@ -72,12 +76,14 @@ func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule Vali
// Tambahkan kondisi tambahan (misalnya, status != 'deleted')
allFilters = append(allFilters, rule.Conditions...)
fmt.Printf("[VALIDATION] Added %d condition filters\n", len(rule.Conditions))
// 2. Bangun filter untuk kolom unik berdasarkan data yang diberikan
for _, colName := range rule.UniqueColumns {
value, exists := data[colName]
if !exists {
// Jika data untuk kolom unik tidak ada, ini adalah kesalahan pemrograman.
fmt.Printf("[VALIDATION] ERROR: data for unique column '%s' not found in provided data map\n", colName)
return false, fmt.Errorf("data for unique column '%s' not found in provided data map", colName)
}
allFilters = append(allFilters, queryUtils.DynamicFilter{
@@ -85,6 +91,7 @@ func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule Vali
Operator: queryUtils.OpEqual,
Value: value,
})
fmt.Printf("[VALIDATION] Added filter for column '%s' with value: %v\n", colName, value)
}
// 3. Tambahkan filter pengecualian ID (untuk operasi UPDATE)
@@ -94,6 +101,7 @@ func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule Vali
Operator: queryUtils.OpNotEqual,
Value: rule.ExcludeIDValue,
})
fmt.Printf("[VALIDATION] Added exclude filter for column '%s' with value: %v\n", rule.ExcludeIDColumn, rule.ExcludeIDValue)
}
// 4. Bangun dan eksekusi query untuk menghitung jumlah record yang cocok
@@ -102,13 +110,20 @@ func (dv *DynamicValidator) Validate(ctx context.Context, db *sqlx.DB, rule Vali
Filters: []queryUtils.FilterGroup{{Filters: allFilters, LogicOp: "AND"}},
}
fmt.Printf("[VALIDATION] Built query with %d total filters\n", len(allFilters))
count, err := dv.qb.ExecuteCount(ctx, db, query)
if err != nil {
fmt.Printf("[VALIDATION] ERROR: failed to execute validation query for table %s: %v\n", rule.TableName, err)
return false, fmt.Errorf("failed to execute validation query for table %s: %w", rule.TableName, err)
}
fmt.Printf("[VALIDATION] Query executed successfully, count result: %d\n", count)
// 5. Kembalikan hasil
-return count > 0, nil
result := count > 0
fmt.Printf("[VALIDATION] Validation result: isDuplicate=%t (count > 0: %d > 0 = %t)\n", result, count, result)
return result, nil
}
// =============================================================================
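A hedged usage sketch of the exclude-ID path described in step 3 above. The ExcludeIDColumn and ExcludeIDValue field names come from this diff, but the exact ValidationRule layout and the return type of NewUniqueFieldRule are not shown here and are assumed; emailFreeForUpdate is a hypothetical helper.

package examples

import (
	"context"
	"log"

	"api-service/internal/validation"

	"github.com/jmoiron/sqlx"
)

// emailFreeForUpdate checks uniqueness while updating user `userID`, excluding
// the row being updated so its own email does not count as a duplicate.
func emailFreeForUpdate(ctx context.Context, db *sqlx.DB, dv *validation.DynamicValidator, userID int, email string) (bool, error) {
	rule := validation.NewUniqueFieldRule("users", "email")
	rule.ExcludeIDColumn = "id" // assumed to be settable on the returned rule
	rule.ExcludeIDValue = userID

	isDuplicate, err := dv.Validate(ctx, db, rule, map[string]interface{}{"email": email})
	if err != nil {
		return false, err
	}
	if isDuplicate {
		log.Printf("email %q already belongs to another record", email)
	}
	return !isDuplicate, nil
}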

View File

@@ -3,165 +3,278 @@ global:
output_dir: "internal/handlers" output_dir: "internal/handlers"
enable_swagger: true enable_swagger: true
enable_logging: true enable_logging: true
database:
default_connection: "postgres_satudata"
timeout_seconds: 30
services: services:
schedule: pasien:
name: "Jadwal Dokter" name: "Manajemen Data Pasien"
category: "schedule" category: "pasien"
package: "schedule" package: "pasien"
description: "Jadwal Dokter management" description: "API untuk mengelola data pasien dengan informasi lokasi lengkap"
base_url: "" base_url: ""
timeout: 30 timeout: 30
retry_count: 3 retry_count: 3
table_name: "m_pasien"
endpoints:
schedule:
description: "Jadwal dokter management"
handler_folder: "master"
handler_file: "schedule.go"
handler_name: "schedule"
table_name: "daftar_jadwal_dokter"
# Definisikan skema tabel di sini
# =
schema:
columns:
- name: "id"
type: "serial4"
primary_key: true
go_type: "string" # Override tipe Go, UUID biasanya string
- name: "Hari"
type: "int4"
nullable: true
- name: "Nama_hari"
type: "varchar"
nullable: true
- name: "Waktu"
type: "varchar"
nullable: true
- name: "Dokter"
type: "uuid"
nullable: true
go_type: "string" # Override tipe Go
- name: "Spesialis"
type: "int4"
nullable: true
- name: "Sub_spesialis"
type: "int4"
nullable: true
- name: "Status"
type: "int4"
nullable: true
# ======================================================================
functions:
list:
methods: ["GET"]
path: "/"
get_routes: "/"
get_path: "/"
model: "Schedule"
response_model: "ScheduleGetResponse"
description: "Get schedule list with pagination and filters"
summary: "Get Schedule List"
tags: ["Schedule"]
require_auth: true
cache_enabled: true
enable_database: true
cache_ttl: 300
has_pagination: true
has_filter: true
has_search: true
has_stats: true
get:
methods: ["GET"]
path: "/:id"
get_routes: "/:id"
get_path: "/:id"
model: "Schedule"
response_model: "ScheduleGetByIDResponse"
description: "Get schedule by ID"
summary: "Get schedule by ID"
tags: ["Schedule"]
require_auth: true
cache_enabled: true
enable_database: true
cache_ttl: 300
search: # Define all columns once for reuse
methods: ["GET"] columns:
path: "/search" - name: "nomr"
get_routes: "/search" type: "varchar"
get_path: "/search" nullable: true
model: "Schedule" go_type: "string"
response_model: "ScheduleGetResponse" description: "Nomor Rekam Medis"
description: "Search schedule" - name: "title"
summary: "Search Schedule" type: "varchar"
tags: ["Schedule"] nullable: true
require_auth: true go_type: "string"
cache_enabled: true description: "Gelar pasien (Tn, Ny, Sdr, dll)"
enable_database: true - name: "nama"
cache_ttl: 300 type: "varchar"
has_search: true nullable: true
go_type: "string"
create: validation: "required,min=1,max=100"
methods: ["POST"] description: "Nama lengkap pasien"
path: "/" - name: "tempat"
post_routes: "/" type: "varchar"
post_path: "/" nullable: true
model: "Schedule" go_type: "string"
response_model: "ScheduleCreateResponse" description: "Tempat lahir pasien"
request_model: "ScheduleCreateRequest" - name: "tgllahir"
description: "Create new schedule" type: "date"
summary: "Create Schedule" nullable: true
tags: ["Schedule"] go_type: "time.Time"
require_auth: true description: "Tanggal lahir pasien"
cache_enabled: false - name: "jeniskelamin"
enable_database: true type: "varchar"
cache_ttl: 0 nullable: true
go_type: "string"
update: validation: "oneof=L P"
methods: ["PUT"] description: "Jenis kelamin (L/P)"
path: "/:id" - name: "alamat"
put_routes: "/:id" type: "varchar"
put_path: "/:id" nullable: true
model: "Schedule" go_type: "string"
response_model: "ScheduleUpdateResponse" description: "Alamat lengkap pasien"
request_model: "ScheduleUpdateRequest" - name: "kelurahan"
description: "Update schedule" type: "int8"
summary: "Update Schedule" nullable: true
tags: ["Schedule"] go_type: "int64"
require_auth: true description: "ID Kelurahan"
cache_enabled: false - name: "kdkecamatan"
enable_database: true type: "int4"
cache_ttl: 0 nullable: true
go_type: "int32"
delete: description: "ID Kecamatan"
methods: ["DELETE"] - name: "kota"
path: "/:id" type: "int4"
delete_routes: "/:id" nullable: true
delete_path: "/:id" go_type: "int32"
model: "Schedule" description: "ID Kota"
response_model: "ScheduleDeleteResponse" - name: "kdprovinsi"
description: "Delete schedule" type: "int4"
summary: "Delete Schedule" nullable: true
tags: ["Schedule"] go_type: "int32"
require_auth: true description: "ID Provinsi"
cache_enabled: false - name: "agama"
enable_database: true type: "int4"
cache_ttl: 0 nullable: true
go_type: "int32"
stats: description: "ID Agama"
methods: ["GET"] - name: "no_kartu"
path: "/stats" type: "varchar"
get_routes: "/stats" nullable: true
get_path: "/stats" go_type: "string"
model: "AggregateData" description: "Nomor kartu identitas"
response_model: "AggregateData" - name: "noktp_baru"
description: "Get retribusi statistics" type: "varchar"
summary: "Get Retribusi Stats" nullable: true
tags: ["Retribusi"] go_type: "string"
require_auth: true description: "Nomor KTP baru"
cache_enabled: true
enable_database: true # Define relationships with other tables
cache_ttl: 180 relationships:
has_stats: true - name: "provinsi"
table: "m_provinsi"
foreign_key: "kdprovinsi"
local_key: "idprovinsi"
columns:
- name: "idprovinsi"
type: "int4"
nullable: false
go_type: "int32"
primary_key: true
- name: "namaprovinsi"
type: "varchar"
nullable: true
go_type: "string"
description: "Nama provinsi"
- name: "kota"
table: "m_kota"
foreign_key: "kota"
local_key: "idkota"
columns:
- name: "idkota"
type: "int4"
nullable: false
go_type: "int32"
primary_key: true
- name: "namakota"
type: "varchar"
nullable: true
go_type: "string"
description: "Nama kota"
- name: "kecamatan"
table: "m_kecamatan"
foreign_key: "kdkecamatan"
local_key: "idkecamatan"
columns:
- name: "idkecamatan"
type: "int8"
nullable: false
go_type: "int64"
primary_key: true
- name: "namakecamatan"
type: "varchar"
nullable: true
go_type: "string"
description: "Nama kecamatan"
- name: "kelurahan"
table: "m_kelurahan"
foreign_key: "kelurahan"
local_key: "idkelurahan"
columns:
- name: "idkelurahan"
type: "int8"
nullable: false
go_type: "int64"
primary_key: true
- name: "namakelurahan"
type: "varchar"
nullable: true
go_type: "string"
description: "Nama kelurahan"
# Define reusable field groups
field_groups:
base_fields: ["nomr", "title", "nama", "tempat", "tgllahir", "jeniskelamin"]
location_fields: ["alamat", "kelurahan", "kdkecamatan", "kota", "kdprovinsi"]
identity_fields: ["agama", "no_kartu", "noktp_baru"]
all_fields: ["nomr", "title", "nama", "tempat", "tgllahir", "jeniskelamin", "alamat", "kelurahan", "kdkecamatan", "kota", "kdprovinsi", "agama", "no_kartu", "noktp_baru"]
with_location_names: ["nomr", "title", "nama", "tempat", "tgllahir", "jeniskelamin", "alamat", "kelurahan", "namakelurahan", "kdkecamatan", "namakecamatan", "kota", "namakota", "kdprovinsi", "namaprovinsi", "agama", "no_kartu", "noktp_baru"]
# Define endpoints with reusable configurations
endpoints:
list:
methods: ["GET"]
path: "/"
description: "Get list of pasien with pagination and filters"
summary: "Get Pasien List"
tags: ["Pasien"]
require_auth: true
cache_enabled: true
cache_ttl: 300
has_pagination: true
has_filter: true
has_search: true
has_stats: true
fields: "with_location_names"
response_model: "PasienGetResponse"
get_by_nomr:
methods: ["GET"]
path: "/:nomr"
description: "Get pasien by NOMR"
summary: "Get Pasien by NOMR"
tags: ["Pasien"]
require_auth: true
cache_enabled: true
cache_ttl: 300
fields: "with_location_names"
response_model: "PasienGetByNOMRResponse"
create:
methods: ["POST"]
path: "/"
description: "Create a new pasien"
summary: "Create Pasien"
tags: ["Pasien"]
require_auth: true
fields: "all_fields"
request_model: "PasienCreateRequest"
response_model: "PasienCreateResponse"
update:
methods: ["PUT"]
path: "/:nomr"
description: "Update an existing pasien"
summary: "Update Pasien"
tags: ["Pasien"]
require_auth: true
fields: "all_fields"
request_model: "PasienUpdateRequest"
response_model: "PasienUpdateResponse"
delete:
methods: ["DELETE"]
path: "/:nomr"
description: "Delete a pasien"
summary: "Delete Pasien"
tags: ["Pasien"]
require_auth: true
soft_delete: false
response_model: "PasienDeleteResponse"
dynamic:
methods: ["GET"]
path: "/dynamic"
description: "Get pasien with dynamic filtering"
summary: "Get Pasien Dynamic"
tags: ["Pasien"]
require_auth: true
has_dynamic: true
fields: "with_location_names"
response_model: "PasienGetResponse"
search:
methods: ["GET"]
path: "/search"
description: "Search pasien by name or NOMR"
summary: "Search Pasien"
tags: ["Pasien"]
require_auth: true
has_search: true
fields: "with_location_names"
response_model: "PasienGetResponse"
stats:
methods: ["GET"]
path: "/stats"
description: "Get pasien statistics"
summary: "Get Pasien Stats"
tags: ["Pasien"]
require_auth: true
has_stats: true
response_model: "AggregateData"
by_location:
methods: ["GET"]
path: "/by-location"
description: "Get pasien by location (provinsi, kota, kecamatan, kelurahan)"
summary: "Get Pasien by Location"
tags: ["Pasien"]
require_auth: true
has_filter: true
fields: "with_location_names"
response_model: "PasienGetResponse"
by_age:
methods: ["GET"]
path: "/by-age"
description: "Get pasien statistics by age group"
summary: "Get Pasien by Age Group"
tags: ["Pasien"]
require_auth: true
has_stats: true
response_model: "PasienAgeStatsResponse"