// Pasien HTTP handlers (approx. 2150 lines, 63 KiB).
package handlers
|
|
|
|
import (
|
|
"api-service/internal/config"
|
|
"api-service/internal/database"
|
|
models "api-service/internal/models"
|
|
pasienModels "api-service/internal/models/pasien"
|
|
queryUtils "api-service/internal/utils/query"
|
|
"api-service/internal/utils/validation"
|
|
"api-service/pkg/logger"
|
|
"context"
|
|
"database/sql"
|
|
"fmt"
|
|
"net/http"
|
|
"strconv"
|
|
"strings"
|
|
"sync"
|
|
"time"
|
|
|
|
"github.com/gin-gonic/gin"
|
|
"github.com/go-playground/validator/v10"
|
|
"github.com/jmoiron/sqlx"
|
|
"github.com/lib/pq"
|
|
)
|
|
|
|
// =============================================================================
|
|
// GLOBAL INITIALIZATION & VALIDATION
|
|
// =============================================================================
|
|
|
|
var (
	// db is the package-wide database service shared by every handler;
	// it is assigned exactly once in init().
	db database.Service
	// once guards the one-time initialization performed in init().
	once sync.Once
	// validate is the request-struct validator, extended in init() with the
	// custom "pasien_status" rule.
	validate *validator.Validate
)
|
|
|
|
// Initialize the database connection and validator once
|
|
func init() {
|
|
once.Do(func() {
|
|
db = database.New(config.LoadConfig())
|
|
validate = validator.New()
|
|
validate.RegisterValidation("pasien_status", validatePasienStatus)
|
|
if db == nil {
|
|
logger.Fatal("Failed to initialize database connection")
|
|
}
|
|
})
|
|
}
|
|
|
|
// Custom validation for pasien status
|
|
func validatePasienStatus(fl validator.FieldLevel) bool {
|
|
return models.IsValidStatus(fl.Field().String())
|
|
}
|
|
|
|
// =============================================================================
|
|
// CACHE IMPLEMENTATION
|
|
// =============================================================================
|
|
|
|
// CacheEntry represents an entry in the cache
|
|
type CacheEntry struct {
|
|
Data interface{}
|
|
ExpiresAt time.Time
|
|
}
|
|
|
|
// IsExpired checks if the cache entry has expired
|
|
func (e *CacheEntry) IsExpired() bool {
|
|
return time.Now().After(e.ExpiresAt)
|
|
}
|
|
|
|
// InMemoryCache implements a simple in-memory cache with TTL
|
|
type InMemoryCache struct {
|
|
items sync.Map
|
|
mu sync.RWMutex
|
|
}
|
|
|
|
// NewInMemoryCache creates a new in-memory cache
|
|
func NewInMemoryCache() *InMemoryCache {
|
|
return &InMemoryCache{}
|
|
}
|
|
|
|
// Get retrieves an item from the cache
|
|
func (c *InMemoryCache) Get(key string) (interface{}, bool) {
|
|
val, ok := c.items.Load(key)
|
|
if !ok {
|
|
return nil, false
|
|
}
|
|
|
|
entry, ok := val.(*CacheEntry)
|
|
if !ok || entry.IsExpired() {
|
|
c.items.Delete(key)
|
|
return nil, false
|
|
}
|
|
|
|
return entry.Data, true
|
|
}
|
|
|
|
// Set stores an item in the cache with a TTL
|
|
func (c *InMemoryCache) Set(key string, value interface{}, ttl time.Duration) {
|
|
entry := &CacheEntry{
|
|
Data: value,
|
|
ExpiresAt: time.Now().Add(ttl),
|
|
}
|
|
c.items.Store(key, entry)
|
|
}
|
|
|
|
// Delete removes an item from the cache
|
|
func (c *InMemoryCache) Delete(key string) {
|
|
c.items.Delete(key)
|
|
}
|
|
|
|
// DeleteByPrefix removes all items with a specific prefix
|
|
func (c *InMemoryCache) DeleteByPrefix(prefix string) {
|
|
c.items.Range(func(key, value interface{}) bool {
|
|
if keyStr, ok := key.(string); ok && strings.HasPrefix(keyStr, prefix) {
|
|
c.items.Delete(key)
|
|
}
|
|
return true
|
|
})
|
|
}
|
|
|
|
// =============================================================================
|
|
// Pasien HANDLER STRUCT
|
|
// =============================================================================
|
|
|
|
// PasienHandler handles pasien services
type PasienHandler struct {
	db database.Service // shared database service, wired from the package-level handle set in init()
	queryBuilder *queryUtils.QueryBuilder // query builder pre-loaded with the allowed-column whitelist
	validator *validation.DynamicValidator // DB-backed dynamic validation (e.g. uniqueness checks in CreatePasien)
	cache *InMemoryCache // per-handler TTL cache used by the read endpoints
}
|
|
|
|
// NewPasienHandler creates a new PasienHandler with a pre-configured QueryBuilder
|
|
func NewPasienHandler() *PasienHandler {
|
|
// Initialize QueryBuilder with allowed columns list for security.
|
|
queryBuilder := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL).
|
|
SetAllowedColumns([]string{
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
"created_at",
|
|
"updated_at",
|
|
"idprovinsi",
|
|
"namaprovinsi",
|
|
"idkota",
|
|
"namakota",
|
|
"idkecamatan",
|
|
"namakecamatan",
|
|
"idkelurahan",
|
|
"namakelurahan",
|
|
})
|
|
|
|
return &PasienHandler{
|
|
db: db,
|
|
queryBuilder: queryBuilder,
|
|
validator: validation.NewDynamicValidator(queryBuilder),
|
|
cache: NewInMemoryCache(),
|
|
}
|
|
}
|
|
|
|
// =============================================================================
|
|
// HANDLER ENDPOINTS
|
|
// =============================================================================
|
|
|
|
// GetPasienByAge godoc
|
|
// @Summary Get Pasien by Age Group
|
|
// @Description Get pasien statistics by age group
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param age_group query string false "Age group (child, teen, adult, senior)"
|
|
// @Success 200 {object} pasienModels.PasienAgeStatsResponse "Statistics data"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/by-age [get]
|
|
func (h *PasienHandler) GetPasienByAge(c *gin.Context) {
	// Validate age_group against the closed set of buckets.
	ageGroup := c.Query("age_group")
	validAgeGroups := map[string]bool{
		"child": true, // 0-12 years
		"teen": true, // 13-17 years
		"adult": true, // 18-59 years
		"senior": true, // 60+ years
	}

	// Missing or unrecognized bucket is a client error.
	if ageGroup == "" || !validAgeGroups[ageGroup] {
		h.respondError(c, "Invalid age group", fmt.Errorf("age group must be one of: child, teen, adult, senior"), http.StatusBadRequest)
		return
	}

	// Use GetSQLXDB to get database connection
	dbConn, err := h.db.GetSQLXDB("postgres_simrs")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}
	ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
	defer cancel()

	// BUG(review): ageGroup is validated above but never applied to the
	// query — the only filter is status != 'deleted', so every age group
	// returns the same grand total of non-deleted pasien. A date-of-birth
	// (tgllahir) range filter per bucket is presumably intended; confirm
	// and fix against the queryUtils operator set.
	query := queryUtils.DynamicQuery{
		From: "m_pasien",
		Fields: []queryUtils.SelectField{
			{Expression: "COUNT(*)", Alias: "count"},
		},
		Filters: []queryUtils.FilterGroup{{
			Filters: []queryUtils.DynamicFilter{
				{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
			},
			LogicOp: "AND",
		}},
	}

	// Scan the single COUNT(*) row into a throwaway struct.
	var result struct {
		Count int `db:"count"`
	}
	err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, query, &result)
	if err != nil {
		h.logAndRespondError(c, "Failed to get age statistics", err, http.StatusInternalServerError)
		return
	}

	// Build response
	response := pasienModels.PasienAgeStatsResponse{
		Message: fmt.Sprintf("Age group '%s' statistics retrieved successfully", ageGroup),
		Data: map[string]interface{}{
			"age_group": ageGroup,
			"count": result.Count,
		},
	}
	c.JSON(http.StatusOK, response)
}
|
|
|
|
// GetPasienDynamic godoc
|
|
// @Summary Get Pasien Dynamic
|
|
// @Description Get pasien with dynamic filtering
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param fields query string false "Fields to select (e.g., fields=*.*)"
|
|
// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[name][_eq]=value)"
|
|
// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)"
|
|
// @Param limit query int false "Limit" default(10)
|
|
// @Param offset query int false "Offset" default(0)
|
|
// @Success 200 {object} pasienModels.PasienGetResponse "Success response"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/dynamic [get]
|
|
func (h *PasienHandler) GetPasienDynamic(c *gin.Context) {
|
|
parser := queryUtils.NewQueryParser().SetLimits(10, 100)
|
|
dynamicQuery, err := parser.ParseQueryWithDefaultFields(c.Request.URL.Query(), "m_pasien", []string{
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
"namakelurahan",
|
|
"namakecamatan",
|
|
"namakota",
|
|
"namaprovinsi",
|
|
})
|
|
if err != nil {
|
|
h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Add joins for relationships using the correct structure
|
|
dynamicQuery.Joins = []queryUtils.Join{
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_provinsi",
|
|
Alias: "m_provinsi",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kota",
|
|
Alias: "m_kota",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kecamatan",
|
|
Alias: "m_kecamatan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kelurahan",
|
|
Alias: "m_kelurahan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
|
|
},
|
|
},
|
|
},
|
|
}
|
|
|
|
// Add default filter to exclude deleted records
|
|
dynamicQuery.Filters = append([]queryUtils.FilterGroup{{
|
|
Filters: []queryUtils.DynamicFilter{{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}},
|
|
LogicOp: "AND",
|
|
}}, dynamicQuery.Filters...)
|
|
|
|
// Try to get from cache first
|
|
// Create cache key from query string
|
|
cacheKey := fmt.Sprintf("pasien:dynamic:%s", c.Request.URL.RawQuery)
|
|
if cachedData, found := h.cache.Get(cacheKey); found {
|
|
logger.Info("Cache hit for dynamic query", map[string]interface{}{"cache_key": cacheKey})
|
|
|
|
// Convert from interface{} to expected type
|
|
if pasiens, ok := cachedData.([]pasienModels.Pasien); ok {
|
|
meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, len(pasiens))
|
|
response := pasienModels.PasienGetResponse{
|
|
Message: "Data pasien berhasil diambil (dari cache)",
|
|
Data: pasiens,
|
|
Meta: meta,
|
|
}
|
|
c.JSON(http.StatusOK, response)
|
|
return
|
|
}
|
|
}
|
|
|
|
// Use GetSQLXDB to get database connection
|
|
dbConn, err := h.db.GetSQLXDB("postgres_simrs")
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
|
|
defer cancel()
|
|
|
|
pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, dynamicQuery)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Save to cache
|
|
h.cache.Set(cacheKey, pasiens, 10*time.Minute) // Cache for 10 minutes
|
|
|
|
meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total)
|
|
response := pasienModels.PasienGetResponse{
|
|
Message: "Data pasien berhasil diambil",
|
|
Data: pasiens,
|
|
Meta: meta,
|
|
}
|
|
c.JSON(http.StatusOK, response)
|
|
}
|
|
|
|
// GetPasienByLocation godoc
|
|
// @Summary Get Pasien by Location
|
|
// @Description Get pasien by location (provinsi, kota, kecamatan, kelurahan)
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param kelurahan query int false "Filter by kelurahan ID"
|
|
// @Param kdkecamatan query int false "Filter by kdkecamatan ID"
|
|
// @Param kota query int false "Filter by kota ID"
|
|
// @Param kdprovinsi query int false "Filter by kdprovinsi ID"
|
|
// @Param limit query int false "Limit (max 100)" default(10)
|
|
// @Param offset query int false "Offset" default(0)
|
|
// @Success 200 {object} pasienModels.PasienGetResponse "Success response"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/by-location [get]
|
|
func (h *PasienHandler) GetPasienByLocation(c *gin.Context) {
|
|
// Parse location filters
|
|
var filters []queryUtils.DynamicFilter
|
|
|
|
if kelurahan := c.Query("kelurahan"); kelurahan != "" {
|
|
if kelurahanID, err := strconv.Atoi(kelurahan); err == nil {
|
|
filters = append(filters, queryUtils.DynamicFilter{Column: "kelurahan", Operator: queryUtils.OpEqual, Value: kelurahanID})
|
|
}
|
|
}
|
|
|
|
if kdkecamatan := c.Query("kdkecamatan"); kdkecamatan != "" {
|
|
if kdkecamatanID, err := strconv.Atoi(kdkecamatan); err == nil {
|
|
filters = append(filters, queryUtils.DynamicFilter{Column: "kdkecamatan", Operator: queryUtils.OpEqual, Value: kdkecamatanID})
|
|
}
|
|
}
|
|
|
|
if kota := c.Query("kota"); kota != "" {
|
|
if kotaID, err := strconv.Atoi(kota); err == nil {
|
|
filters = append(filters, queryUtils.DynamicFilter{Column: "kota", Operator: queryUtils.OpEqual, Value: kotaID})
|
|
}
|
|
}
|
|
|
|
if kdprovinsi := c.Query("kdprovinsi"); kdprovinsi != "" {
|
|
if kdprovinsiID, err := strconv.Atoi(kdprovinsi); err == nil {
|
|
filters = append(filters, queryUtils.DynamicFilter{Column: "kdprovinsi", Operator: queryUtils.OpEqual, Value: kdprovinsiID})
|
|
}
|
|
}
|
|
|
|
// Parse pagination
|
|
limit, offset := 10, 0
|
|
if limitStr := c.Query("limit"); limitStr != "" {
|
|
if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 {
|
|
limit = l
|
|
}
|
|
}
|
|
if offsetStr := c.Query("offset"); offsetStr != "" {
|
|
if o, err := strconv.Atoi(offsetStr); err == nil && o >= 0 {
|
|
offset = o
|
|
}
|
|
}
|
|
|
|
// Use GetSQLXDB to get database connection
|
|
dbConn, err := h.db.GetSQLXDB("postgres_simrs")
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
|
|
defer cancel()
|
|
|
|
// Build query
|
|
query := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{
|
|
{Expression: "id"},
|
|
{Expression: "nomr"},
|
|
{Expression: "status"},
|
|
{Expression: "title"},
|
|
{Expression: "nama"},
|
|
{Expression: "tempat"},
|
|
{Expression: "tgllahir"},
|
|
{Expression: "jeniskelamin"},
|
|
{Expression: "alamat"},
|
|
{Expression: "kelurahan"},
|
|
{Expression: "kdkecamatan"},
|
|
{Expression: "kota"},
|
|
{Expression: "kdprovinsi"},
|
|
{Expression: "agama"},
|
|
{Expression: "no_kartu"},
|
|
{Expression: "noktp_baru"},
|
|
{Expression: "namakelurahan"},
|
|
{Expression: "namakecamatan"},
|
|
{Expression: "namakota"},
|
|
{Expression: "namaprovinsi"},
|
|
},
|
|
Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}},
|
|
Limit: limit,
|
|
Offset: offset,
|
|
}
|
|
|
|
// Add joins for relationships using the correct structure
|
|
query.Joins = []queryUtils.Join{
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_provinsi",
|
|
Alias: "m_provinsi",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kota",
|
|
Alias: "m_kota",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kecamatan",
|
|
Alias: "m_kecamatan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kelurahan",
|
|
Alias: "m_kelurahan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
|
|
},
|
|
},
|
|
},
|
|
}
|
|
|
|
// Add filters if any
|
|
if len(filters) > 0 {
|
|
query.Filters = append(query.Filters, queryUtils.FilterGroup{
|
|
Filters: filters,
|
|
LogicOp: "AND",
|
|
})
|
|
}
|
|
|
|
// Execute query
|
|
pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Build response
|
|
meta := h.calculateMeta(limit, offset, total)
|
|
response := pasienModels.PasienGetResponse{
|
|
Message: "Data pasien by location retrieved successfully",
|
|
Data: pasiens,
|
|
Meta: meta,
|
|
}
|
|
c.JSON(http.StatusOK, response)
|
|
}
|
|
|
|
// CreatePasien godoc
|
|
// @Summary Create Pasien
|
|
// @Description Create a new pasien
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param request body pasienModels.PasienCreateRequest true "Pasien creation request"
|
|
// @Success 201 {object} pasienModels.PasienCreateResponse "Pasien created successfully"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request or validation error"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/ [post]
|
|
func (h *PasienHandler) CreatePasien(c *gin.Context) {
|
|
var req pasienModels.PasienCreateRequest
|
|
if err := c.ShouldBindJSON(&req); err != nil {
|
|
h.respondError(c, "Invalid request body", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
if err := validate.Struct(&req); err != nil {
|
|
h.respondError(c, "Validation failed", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Use GetSQLXDB to get database connection
|
|
dbConn, err := h.db.GetSQLXDB("postgres_simrs")
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
|
|
defer cancel()
|
|
|
|
// Validate id must be unique
|
|
if req.ID != nil {
|
|
rule := validation.NewUniqueFieldRule(
|
|
"m_pasien", // Table name
|
|
"id", // Column that must be unique
|
|
queryUtils.DynamicFilter{ // Additional condition
|
|
Column: "status",
|
|
Operator: queryUtils.OpNotEqual,
|
|
Value: "deleted",
|
|
},
|
|
)
|
|
|
|
// Prepare data from request for validation
|
|
dataToValidate := map[string]interface{}{
|
|
"id": *req.ID,
|
|
}
|
|
|
|
// Execute validation
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate id", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "id already exists", fmt.Errorf("duplicate id: %d", *req.ID), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
// Validate nomr must be unique
|
|
if req.Nomr != nil && *req.Nomr != "" {
|
|
rule := validation.NewUniqueFieldRule(
|
|
"m_pasien", // Table name
|
|
"nomr", // Column that must be unique
|
|
queryUtils.DynamicFilter{ // Additional condition
|
|
Column: "status",
|
|
Operator: queryUtils.OpNotEqual,
|
|
Value: "deleted",
|
|
},
|
|
)
|
|
|
|
// Prepare data from request for validation
|
|
dataToValidate := map[string]interface{}{
|
|
"nomr": *req.Nomr,
|
|
}
|
|
|
|
// Execute validation
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate nomr", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "nomr already exists", fmt.Errorf("duplicate nomr: %s", *req.Nomr), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
// Validate no_kartu must be unique
|
|
if req.NoKartu != nil && *req.NoKartu != "" {
|
|
rule := validation.NewUniqueFieldRule(
|
|
"m_pasien", // Table name
|
|
"no_kartu", // Column that must be unique
|
|
queryUtils.DynamicFilter{ // Additional condition
|
|
Column: "status",
|
|
Operator: queryUtils.OpNotEqual,
|
|
Value: "deleted",
|
|
},
|
|
)
|
|
|
|
// Prepare data from request for validation
|
|
dataToValidate := map[string]interface{}{
|
|
"no_kartu": *req.NoKartu,
|
|
}
|
|
|
|
// Execute validation
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate no_kartu", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "no_kartu already exists", fmt.Errorf("duplicate no_kartu: %s", *req.NoKartu), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
data := queryUtils.InsertData{
|
|
Columns: []string{
|
|
"status",
|
|
"date_created", "date_updated",
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
},
|
|
Values: []interface{}{
|
|
req.Status,
|
|
time.Now(), time.Now(),
|
|
req.ID,
|
|
req.Nomr,
|
|
req.Status,
|
|
req.Title,
|
|
req.Nama,
|
|
req.Tempat,
|
|
req.Tgllahir,
|
|
req.Jeniskelamin,
|
|
req.Alamat,
|
|
req.Kelurahan,
|
|
req.Kdkecamatan,
|
|
req.Kota,
|
|
req.Kdprovinsi,
|
|
req.Agama,
|
|
req.NoKartu,
|
|
req.NoktpBaru,
|
|
},
|
|
}
|
|
returningCols := []string{
|
|
"status",
|
|
"sort", "user_created", "date_created", "user_updated", "date_updated",
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
}
|
|
|
|
sql, args, err := h.queryBuilder.BuildInsertQuery("m_pasien", data, returningCols...)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to build insert query", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
var dataPasien pasienModels.Pasien
|
|
err = dbConn.GetContext(ctx, &dataPasien, sql, args...)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to create pasien", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Invalidate cache that might be affected
|
|
h.invalidateRelatedCache()
|
|
|
|
response := pasienModels.PasienCreateResponse{Message: "Pasien berhasil dibuat", Data: &dataPasien}
|
|
c.JSON(http.StatusCreated, response)
|
|
}
|
|
|
|
// GetPasien godoc
|
|
// @Summary Get Pasien List
|
|
// @Description Get list of pasien with pagination and filters
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param limit query int false "Limit (max 100)" default(10)
|
|
// @Param offset query int false "Offset" default(0)
|
|
// @Param status query string false "Filter by status"
|
|
// @Param search query string false "Search in multiple fields"
|
|
// @Success 200 {object} pasienModels.PasienGetResponse "Success response"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/ [get]
|
|
func (h *PasienHandler) GetPasien(c *gin.Context) {
	// Increase timeout for complex queries.
	// NOTE(review): 120s is far above the 30s used by the other read
	// endpoints — confirm it is intentional and that the HTTP server's
	// write timeout permits it.
	ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second)
	defer cancel()

	// Use the core fetchPasiensDynamic function for all data retrieval logic.
	// Base query: full pasien column set plus resolved location names,
	// newest records first.
	query := queryUtils.DynamicQuery{
		From: "m_pasien",
		Fields: []queryUtils.SelectField{
			{Expression: "id"},
			{Expression: "nomr"},
			{Expression: "status"},
			{Expression: "title"},
			{Expression: "nama"},
			{Expression: "tempat"},
			{Expression: "tgllahir"},
			{Expression: "jeniskelamin"},
			{Expression: "alamat"},
			{Expression: "kelurahan"},
			{Expression: "kdkecamatan"},
			{Expression: "kota"},
			{Expression: "kdprovinsi"},
			{Expression: "agama"},
			{Expression: "no_kartu"},
			{Expression: "noktp_baru"},
			{Expression: "namakelurahan"},
			{Expression: "namakecamatan"},
			{Expression: "namakota"},
			{Expression: "namaprovinsi"},
		},
		Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}},
	}

	// LEFT JOIN the location reference tables so the nama* columns resolve
	// even when a location code is missing.
	query.Joins = []queryUtils.Join{
		{
			Type:  "LEFT",
			Table: "m_provinsi",
			Alias: "m_provinsi",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kota",
			Alias: "m_kota",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kecamatan",
			Alias: "m_kecamatan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kelurahan",
			Alias: "m_kelurahan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
				},
			},
		},
	}

	// Parse pagination (defaults 10/0; invalid values silently keep defaults).
	if limit, err := strconv.Atoi(c.DefaultQuery("limit", "10")); err == nil && limit > 0 && limit <= 100 {
		query.Limit = limit
	}
	if offset, err := strconv.Atoi(c.DefaultQuery("offset", "0")); err == nil && offset >= 0 {
		query.Offset = offset
	}

	// Use GetSQLXDB to get database connection
	dbConn, err := h.db.GetSQLXDB("postgres_simrs")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	// Parse simple filters (unknown status values are silently ignored).
	var filters []queryUtils.DynamicFilter
	if status := c.Query("status"); status != "" && models.IsValidStatus(status) {
		filters = append(filters, queryUtils.DynamicFilter{Column: "status", Operator: queryUtils.OpEqual, Value: status})
	}

	// Optimize query search with caching
	search := c.Query("search")
	var searchFilters []queryUtils.DynamicFilter
	var cacheKey string
	var useCache bool

	// Initialize searchFilters before using it in the cache hit section
	if search != "" {
		// Limit search length to prevent slow queries
		if len(search) > 50 {
			search = search[:50]
		}

		// Generate cache key for search.
		// NOTE(review): the key includes search/limit/offset but NOT the
		// status filter, so requests differing only in status can serve
		// each other's cached pages — confirm whether that is acceptable.
		cacheKey = fmt.Sprintf("pasien:search:%s:%d:%d", search, query.Limit, query.Offset)

		// Case-insensitive substring match across the searchable columns
		// (combined with OR below).
		searchFilters = []queryUtils.DynamicFilter{
			{Column: "nomr", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "title", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "nama", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "no_kartu", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
		}

		// Try to get from cache first
		if cachedData, found := h.cache.Get(cacheKey); found {
			logger.Info("Cache hit for search", map[string]interface{}{"search": search, "cache_key": cacheKey})

			// Convert from interface{} to expected type; on a type mismatch
			// we log and fall through to a fresh query.
			pasiens, ok := cachedData.([]pasienModels.Pasien)
			if !ok {
				logger.Error("Failed to convert cached data", map[string]interface{}{"cache_key": cacheKey})
			} else {
				// If requested, get aggregation data (summary is never
				// cached; it is recomputed even on a cache hit).
				var aggregateData *models.AggregateData
				if c.Query("include_summary") == "true" {
					// Build full filter groups for aggregate data (including search filters)
					fullFilterGroups := []queryUtils.FilterGroup{
						{Filters: searchFilters, LogicOp: "OR"},
					}
					if len(filters) > 0 {
						fullFilterGroups = append(fullFilterGroups, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"})
					}
					aggregateData, err = h.getAggregateData(ctx, dbConn, fullFilterGroups)
					if err != nil {
						h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError)
						return
					}
				}

				// Build response.
				// NOTE(review): on a cache hit the meta is computed from
				// len(pasiens) (the cached page size), not the true total —
				// pagination metadata differs from the fresh-query path
				// whenever the total exceeds one page.
				meta := h.calculateMeta(query.Limit, query.Offset, len(pasiens))
				response := pasienModels.PasienGetResponse{
					Message: "Data pasien berhasil diambil (dari cache)",
					Data:    pasiens,
					Meta:    meta,
				}

				if aggregateData != nil {
					response.Summary = aggregateData
				}

				c.JSON(http.StatusOK, response)
				return
			}
		}

		// If not in cache, mark for saving after query
		useCache = true

		// If there's search, create OR filter group
		query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: "OR"})
	}

	// Add other filters (if any) as AND group
	if len(filters) > 0 {
		query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"})
	}

	pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query)
	if err != nil {
		h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError)
		return
	}

	// Save search results to cache if there's a search parameter
	// (empty result pages are deliberately not cached).
	if useCache && len(pasiens) > 0 {
		h.cache.Set(cacheKey, pasiens, 15*time.Minute) // Cache for 15 minutes
		logger.Info("Cached search results", map[string]interface{}{"search": search, "cache_key": cacheKey, "count": len(pasiens)})
	}

	// If requested, get aggregation data
	var aggregateData *models.AggregateData
	if c.Query("include_summary") == "true" {
		aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters)
		if err != nil {
			h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError)
			return
		}
	}

	// Build response
	meta := h.calculateMeta(query.Limit, query.Offset, total)
	response := pasienModels.PasienGetResponse{
		Message: "Data pasien berhasil diambil",
		Data:    pasiens,
		Meta:    meta,
	}

	if aggregateData != nil {
		response.Summary = aggregateData
	}

	c.JSON(http.StatusOK, response)
}
|
|
|
|
// GetPasienByID godoc
// @Summary Get Pasien by ID
// @Description Get pasien by ID
// @Tags Pasien
// @Accept json
// @Produce json
// @Param id path string true "Pasien ID"
// @Success 200 {object} pasienModels.PasienGetByIDResponse "Success response"
// @Failure 400 {object} models.ErrorResponse "Invalid ID format"
// @Failure 404 {object} models.ErrorResponse "Pasien not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/pasien/:id [get]
//
// GetPasienByID returns one non-deleted pasien row selected by the :id path
// parameter. It first consults the in-memory cache under key
// "pasien:id:<id>"; on a miss it runs a dynamic single-row query against
// m_pasien (LEFT JOINed to the four region tables) and caches the result for
// 30 minutes.
func (h *PasienHandler) GetPasienByID(c *gin.Context) {
	id := c.Param("id")
	if id == "" {
		h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest)
		return
	}

	// Try to get from cache first
	cacheKey := fmt.Sprintf("pasien:id:%s", id)
	if cachedData, found := h.cache.Get(cacheKey); found {
		logger.Info("Cache hit for id", map[string]interface{}{"id": id, "cache_key": cacheKey})

		// Convert from interface{} to expected type. If the assertion fails
		// (stale/foreign entry) we fall through to the database path instead
		// of erroring out.
		if cachedPasien, ok := cachedData.(pasienModels.Pasien); ok {
			response := pasienModels.PasienGetByIDResponse{
				Message: "Pasien details retrieved successfully (dari cache)",
				Data:    &cachedPasien,
			}
			c.JSON(http.StatusOK, response)
			return
		}
	}

	// Use GetSQLXDB to get database connection
	dbConn, err := h.db.GetSQLXDB("postgres_simrs")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}
	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	// Single-row lookup. Soft-deleted rows (status = "deleted") are treated
	// as absent. The nama* fields presumably come from the joined region
	// tables below — TODO confirm how the query builder resolves unqualified
	// column names.
	dynamicQuery := queryUtils.DynamicQuery{
		From: "m_pasien",
		Fields: []queryUtils.SelectField{
			{Expression: "id"},
			{Expression: "nomr"},
			{Expression: "status"},
			{Expression: "title"},
			{Expression: "nama"},
			{Expression: "tempat"},
			{Expression: "tgllahir"},
			{Expression: "jeniskelamin"},
			{Expression: "alamat"},
			{Expression: "kelurahan"},
			{Expression: "kdkecamatan"},
			{Expression: "kota"},
			{Expression: "kdprovinsi"},
			{Expression: "agama"},
			{Expression: "no_kartu"},
			{Expression: "noktp_baru"},
			{Expression: "namakelurahan"},
			{Expression: "namakecamatan"},
			{Expression: "namakota"},
			{Expression: "namaprovinsi"},
		},
		Filters: []queryUtils.FilterGroup{{
			Filters: []queryUtils.DynamicFilter{
				{Column: "id", Operator: queryUtils.OpEqual, Value: id},
				{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
			},
			LogicOp: "AND",
		}},
		Limit: 1,
	}

	// Add joins for relationships using the correct structure
	dynamicQuery.Joins = []queryUtils.Join{
		{
			Type:  "LEFT",
			Table: "m_provinsi",
			Alias: "m_provinsi",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kota",
			Alias: "m_kota",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kecamatan",
			Alias: "m_kecamatan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kelurahan",
			Alias: "m_kelurahan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
				},
			},
		},
	}

	var dataPasien pasienModels.Pasien
	err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &dataPasien)
	if err != nil {
		if err == sql.ErrNoRows {
			h.respondError(c, "Pasien not found", err, http.StatusNotFound)
		} else {
			h.logAndRespondError(c, "Failed to get pasien", err, http.StatusInternalServerError)
		}
		return
	}

	// Save to cache
	h.cache.Set(cacheKey, dataPasien, 30*time.Minute) // Cache for 30 minutes

	response := pasienModels.PasienGetByIDResponse{
		Message: "Pasien details retrieved successfully",
		Data:    &dataPasien,
	}
	c.JSON(http.StatusOK, response)
}
|
|
|
|
// GetPasienByNomr godoc
// @Summary Get Pasien by Nomr
// @Description Get pasien by Nomr
// @Tags Pasien
// @Accept json
// @Produce json
// @Success 200 {object} pasienModels.PasienGetByNomrResponse "Success response"
// @Failure 400 {object} models.ErrorResponse "Bad request"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/pasien/nomr/:nomr [get]
//
// GetPasienByNomr serves a paginated pasien listing with an optional status
// filter, optional free-text search (ILIKE over nomr/title/nama/no_kartu,
// cached per search+page for 15 minutes), and an optional aggregate summary
// (include_summary=true).
//
// NOTE(review): despite the name and the :nomr route segment, this handler
// never reads c.Param("nomr") and never filters on nomr — it behaves as a
// generic listing endpoint. Confirm whether an equality filter on nomr was
// intended here.
func (h *PasienHandler) GetPasienByNomr(c *gin.Context) {
	// Increase timeout for complex queries
	ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second)
	defer cancel()

	// Use the core fetchPasiensDynamic function for all data retrieval logic.
	query := queryUtils.DynamicQuery{
		From: "m_pasien",
		Fields: []queryUtils.SelectField{
			{Expression: "id"},
			{Expression: "nomr"},
			{Expression: "status"},
			{Expression: "title"},
			{Expression: "nama"},
			{Expression: "tempat"},
			{Expression: "tgllahir"},
			{Expression: "jeniskelamin"},
			{Expression: "alamat"},
			{Expression: "kelurahan"},
			{Expression: "kdkecamatan"},
			{Expression: "kota"},
			{Expression: "kdprovinsi"},
			{Expression: "agama"},
			{Expression: "no_kartu"},
			{Expression: "noktp_baru"},
			{Expression: "namakelurahan"},
			{Expression: "namakecamatan"},
			{Expression: "namakota"},
			{Expression: "namaprovinsi"},
		},
		Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}},
	}

	// Add joins for relationships using the correct structure
	query.Joins = []queryUtils.Join{
		{
			Type:  "LEFT",
			Table: "m_provinsi",
			Alias: "m_provinsi",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kota",
			Alias: "m_kota",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kecamatan",
			Alias: "m_kecamatan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
				},
			},
		},
		{
			Type:  "LEFT",
			Table: "m_kelurahan",
			Alias: "m_kelurahan",
			OnConditions: queryUtils.FilterGroup{
				Filters: []queryUtils.DynamicFilter{
					{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
				},
			},
		},
	}

	// Parse pagination. Out-of-range values silently fall back to the
	// zero-value Limit/Offset (i.e. the query builder's defaults).
	if limit, err := strconv.Atoi(c.DefaultQuery("limit", "10")); err == nil && limit > 0 && limit <= 100 {
		query.Limit = limit
	}
	if offset, err := strconv.Atoi(c.DefaultQuery("offset", "0")); err == nil && offset >= 0 {
		query.Offset = offset
	}

	// Use GetSQLXDB to get database connection
	dbConn, err := h.db.GetSQLXDB("postgres_simrs")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}

	// Parse simple filters. Unknown status values are silently ignored.
	var filters []queryUtils.DynamicFilter
	if status := c.Query("status"); status != "" && models.IsValidStatus(status) {
		filters = append(filters, queryUtils.DynamicFilter{Column: "status", Operator: queryUtils.OpEqual, Value: status})
	}

	// Optimize query search with caching
	search := c.Query("search")
	var searchFilters []queryUtils.DynamicFilter
	var cacheKey string
	var useCache bool

	// Initialize searchFilters before using it in the cache hit section
	if search != "" {
		// Limit search length to prevent slow queries
		if len(search) > 50 {
			search = search[:50]
		}

		// Generate cache key for search (keyed on the page too, so each
		// limit/offset combination is cached independently)
		cacheKey = fmt.Sprintf("pasien:search:%s:%d:%d", search, query.Limit, query.Offset)

		// Initialize searchFilters here
		searchFilters = []queryUtils.DynamicFilter{
			{Column: "nomr", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "title", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "nama", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
			{Column: "no_kartu", Operator: queryUtils.OpILike, Value: "%" + search + "%"},
		}

		// Try to get from cache first
		if cachedData, found := h.cache.Get(cacheKey); found {
			logger.Info("Cache hit for search", map[string]interface{}{"search": search, "cache_key": cacheKey})

			// Convert from interface{} to expected type. On assertion failure
			// we log and fall through to the database path.
			pasiens, ok := cachedData.([]pasienModels.Pasien)
			if !ok {
				logger.Error("Failed to convert cached data", map[string]interface{}{"cache_key": cacheKey})
			} else {
				// If requested, get aggregation data (the summary itself is
				// never cached — it is recomputed even on a cache hit)
				var aggregateData *models.AggregateData
				if c.Query("include_summary") == "true" {
					// Build full filter groups for aggregate data (including search filters)
					fullFilterGroups := []queryUtils.FilterGroup{
						{Filters: searchFilters, LogicOp: "OR"},
					}
					if len(filters) > 0 {
						fullFilterGroups = append(fullFilterGroups, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"})
					}
					aggregateData, err = h.getAggregateData(ctx, dbConn, fullFilterGroups)
					if err != nil {
						h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError)
						return
					}
				}

				// Build response. NOTE(review): on the cached path the "total"
				// passed to calculateMeta is just the cached page size, not
				// the overall result count — verify this is acceptable for
				// pagination clients.
				meta := h.calculateMeta(query.Limit, query.Offset, len(pasiens))
				response := pasienModels.PasienGetByNomrResponse{
					Message: "Data pasien berhasil diambil (dari cache)",
					Data:    pasiens,
					Meta:    meta,
				}

				if aggregateData != nil {
					response.Summary = aggregateData
				}

				c.JSON(http.StatusOK, response)
				return
			}
		}

		// If not in cache, mark for saving after query
		useCache = true

		// If there's search, create OR filter group
		query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: "OR"})
	}

	// Add other filters (if any) as AND group
	if len(filters) > 0 {
		query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"})
	}

	pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query)
	if err != nil {
		h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError)
		return
	}

	// Save search results to cache if there's a search parameter
	if useCache && len(pasiens) > 0 {
		h.cache.Set(cacheKey, pasiens, 15*time.Minute) // Cache for 15 minutes
		logger.Info("Cached search results", map[string]interface{}{"search": search, "cache_key": cacheKey, "count": len(pasiens)})
	}

	// If requested, get aggregation data
	var aggregateData *models.AggregateData
	if c.Query("include_summary") == "true" {
		aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters)
		if err != nil {
			h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError)
			return
		}
	}

	// Build response
	meta := h.calculateMeta(query.Limit, query.Offset, total)
	response := pasienModels.PasienGetByNomrResponse{
		Message: "Data pasien berhasil diambil",
		Data:    pasiens,
		Meta:    meta,
	}

	if aggregateData != nil {
		response.Summary = aggregateData
	}

	c.JSON(http.StatusOK, response)
}
|
|
|
|
// UpdatePasien godoc
|
|
// @Summary Update Pasien
|
|
// @Description Update an existing pasien
|
|
// @Tags Pasien
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Param id path string true "Pasien ID"
|
|
// @Param request body pasienModels.PasienUpdateRequest true "Pasien update request"
|
|
// @Success 200 {object} pasienModels.PasienUpdateResponse "Pasien updated successfully"
|
|
// @Failure 400 {object} models.ErrorResponse "Bad request or validation error"
|
|
// @Failure 404 {object} models.ErrorResponse "Pasien not found"
|
|
// @Failure 500 {object} models.ErrorResponse "Internal server error"
|
|
// @Router /api/v1/pasien/:nomr [put]
|
|
func (h *PasienHandler) UpdatePasien(c *gin.Context) {
|
|
id := c.Param("id")
|
|
if id == "" {
|
|
h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest)
|
|
return
|
|
}
|
|
var req pasienModels.PasienUpdateRequest
|
|
if err := c.ShouldBindJSON(&req); err != nil {
|
|
h.respondError(c, "Invalid request body", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
// Set the ID from path parameter to request
|
|
idInt, err := strconv.Atoi(id)
|
|
if err != nil {
|
|
h.respondError(c, "Invalid ID format", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
idInt32 := int32(idInt)
|
|
req.ID = &idInt32
|
|
if err := validate.Struct(&req); err != nil {
|
|
h.respondError(c, "Validation failed", err, http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Try to get old data for cache invalidation
|
|
var oldData pasienModels.Pasien
|
|
dbConn, err := h.db.GetSQLXDB("postgres_simrs")
|
|
if err == nil {
|
|
ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)
|
|
defer cancel()
|
|
|
|
dynamicQuery := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{{Expression: "*"}},
|
|
Filters: []queryUtils.FilterGroup{{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "id", Operator: queryUtils.OpEqual, Value: id},
|
|
},
|
|
LogicOp: "AND",
|
|
}},
|
|
Limit: 1,
|
|
}
|
|
|
|
// Add joins for relationships using the correct structure
|
|
dynamicQuery.Joins = []queryUtils.Join{
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_provinsi",
|
|
Alias: "m_provinsi",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kota",
|
|
Alias: "m_kota",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kecamatan",
|
|
Alias: "m_kecamatan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
|
|
},
|
|
},
|
|
},
|
|
{
|
|
Type: "LEFT",
|
|
Table: "m_kelurahan",
|
|
Alias: "m_kelurahan",
|
|
OnConditions: queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
|
|
},
|
|
},
|
|
},
|
|
}
|
|
|
|
err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &oldData)
|
|
if err != nil {
|
|
logger.Error("Failed to fetch old data for cache invalidation", map[string]interface{}{"error": err.Error(), "id": id})
|
|
}
|
|
}
|
|
|
|
// Use GetSQLXDB to get database connection
|
|
dbConn, err = h.db.GetSQLXDB("postgres_simrs")
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
|
|
defer cancel()
|
|
|
|
// Validate id must be unique, except for record with this id
|
|
if req.ID != nil {
|
|
rule := validation.ValidationRule{
|
|
TableName: "m_pasien",
|
|
UniqueColumns: []string{"id"},
|
|
Conditions: []queryUtils.DynamicFilter{
|
|
{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
|
|
},
|
|
ExcludeIDColumn: "id", // Exclude based on 'id' column
|
|
ExcludeIDValue: id, // ...with id value from parameter
|
|
}
|
|
|
|
dataToValidate := map[string]interface{}{
|
|
"id": *req.ID,
|
|
}
|
|
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate id", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "id already exists", fmt.Errorf("duplicate id: %d", *req.ID), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
// Validate nomr must be unique, except for record with this id
|
|
if req.Nomr != nil && *req.Nomr != "" {
|
|
rule := validation.ValidationRule{
|
|
TableName: "m_pasien",
|
|
UniqueColumns: []string{"nomr"},
|
|
Conditions: []queryUtils.DynamicFilter{
|
|
{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
|
|
},
|
|
ExcludeIDColumn: "id", // Exclude based on 'id' column
|
|
ExcludeIDValue: id, // ...with id value from parameter
|
|
}
|
|
|
|
dataToValidate := map[string]interface{}{
|
|
"nomr": *req.Nomr,
|
|
}
|
|
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate nomr", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "nomr already exists", fmt.Errorf("duplicate nomr: %s", *req.Nomr), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
// Validate no_kartu must be unique, except for record with this id
|
|
if req.NoKartu != nil && *req.NoKartu != "" {
|
|
rule := validation.ValidationRule{
|
|
TableName: "m_pasien",
|
|
UniqueColumns: []string{"no_kartu"},
|
|
Conditions: []queryUtils.DynamicFilter{
|
|
{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
|
|
},
|
|
ExcludeIDColumn: "id", // Exclude based on 'id' column
|
|
ExcludeIDValue: id, // ...with id value from parameter
|
|
}
|
|
|
|
dataToValidate := map[string]interface{}{
|
|
"no_kartu": *req.NoKartu,
|
|
}
|
|
|
|
isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to validate no_kartu", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if isDuplicate {
|
|
h.respondError(c, "no_kartu already exists", fmt.Errorf("duplicate no_kartu: %s", *req.NoKartu), http.StatusConflict)
|
|
return
|
|
}
|
|
}
|
|
|
|
updateData := queryUtils.UpdateData{
|
|
Columns: []string{
|
|
"status",
|
|
"date_updated",
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
},
|
|
Values: []interface{}{
|
|
req.Status,
|
|
time.Now(),
|
|
req.ID,
|
|
req.Nomr,
|
|
req.Status,
|
|
req.Title,
|
|
req.Nama,
|
|
req.Tempat,
|
|
req.Tgllahir,
|
|
req.Jeniskelamin,
|
|
req.Alamat,
|
|
req.Kelurahan,
|
|
req.Kdkecamatan,
|
|
req.Kota,
|
|
req.Kdprovinsi,
|
|
req.Agama,
|
|
req.NoKartu,
|
|
req.NoktpBaru,
|
|
},
|
|
}
|
|
filters := []queryUtils.FilterGroup{{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "id", Operator: queryUtils.OpEqual, Value: req.ID},
|
|
{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
|
|
},
|
|
LogicOp: "AND",
|
|
}}
|
|
returningCols := []string{
|
|
"status",
|
|
"sort", "user_created", "date_created", "user_updated", "date_updated",
|
|
"id",
|
|
"nomr",
|
|
"status",
|
|
"title",
|
|
"nama",
|
|
"tempat",
|
|
"tgllahir",
|
|
"jeniskelamin",
|
|
"alamat",
|
|
"kelurahan",
|
|
"kdkecamatan",
|
|
"kota",
|
|
"kdprovinsi",
|
|
"agama",
|
|
"no_kartu",
|
|
"noktp_baru",
|
|
}
|
|
|
|
sql, args, err := h.queryBuilder.BuildUpdateQuery("m_pasien", updateData, filters, returningCols...)
|
|
if err != nil {
|
|
h.logAndRespondError(c, "Failed to build update query", err, http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
var dataPasien pasienModels.Pasien
|
|
err = dbConn.GetContext(ctx, &dataPasien, sql, args...)
|
|
if err != nil {
|
|
if err.Error() == "sql: no rows in result set" {
|
|
h.respondError(c, "Pasien not found", err, http.StatusNotFound)
|
|
} else {
|
|
h.logAndRespondError(c, "Failed to update pasien", err, http.StatusInternalServerError)
|
|
}
|
|
return
|
|
}
|
|
|
|
// Invalidate cache that might be affected
|
|
// Invalidate cache for id that was updated
|
|
cacheKey := fmt.Sprintf("pasien:id:%s", id)
|
|
h.cache.Delete(cacheKey)
|
|
|
|
// Invalidate cache for old and new data
|
|
if oldData.ID != 0 {
|
|
h.invalidateRelatedCache()
|
|
}
|
|
h.invalidateRelatedCache()
|
|
|
|
response := pasienModels.PasienUpdateResponse{Message: "Pasien berhasil diperbarui", Data: &dataPasien}
|
|
c.JSON(http.StatusOK, response)
|
|
}
|
|
|
|
// DeletePasien godoc
// @Summary Delete Pasien
// @Description Delete a pasien
// @Tags Pasien
// @Accept json
// @Produce json
// @Param id path string true "Pasien ID"
// @Success 200 {object} pasienModels.PasienDeleteResponse "Pasien deleted successfully"
// @Failure 400 {object} models.ErrorResponse "Invalid ID format"
// @Failure 404 {object} models.ErrorResponse "Pasien not found"
// @Failure 500 {object} models.ErrorResponse "Internal server error"
// @Router /api/v1/pasien/:nomr [delete]
//
// DeletePasien soft-deletes a pasien by setting status = "deleted" on the
// row matching the :id path parameter, then invalidates the related cache
// entries. Rows already marked deleted are treated as not found.
//
// NOTE(review): the @Router annotation says :nomr but the handler reads
// c.Param("id") — confirm the registered route uses :id.
func (h *PasienHandler) DeletePasien(c *gin.Context) {
	id := c.Param("id")
	if id == "" {
		h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest)
		return
	}

	// Best-effort prefetch of the row, used only via the dataToDelete.ID != 0
	// check at the end to gate prefix-cache invalidation; a prefetch failure
	// is logged but does not abort the delete.
	var dataToDelete pasienModels.Pasien
	dbConn, err := h.db.GetSQLXDB("postgres_simrs")
	if err == nil {
		ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)
		defer cancel()

		dynamicQuery := queryUtils.DynamicQuery{
			From:   "m_pasien",
			Fields: []queryUtils.SelectField{{Expression: "*"}},
			Filters: []queryUtils.FilterGroup{{
				Filters: []queryUtils.DynamicFilter{
					{Column: "id", Operator: queryUtils.OpEqual, Value: id},
				},
				LogicOp: "AND",
			}},
			Limit: 1,
		}

		// Add joins for relationships using the correct structure.
		// NOTE(review): the joined columns are not used by the invalidation
		// gate — a plain SELECT id would suffice here; confirm and simplify.
		dynamicQuery.Joins = []queryUtils.Join{
			{
				Type:  "LEFT",
				Table: "m_provinsi",
				Alias: "m_provinsi",
				OnConditions: queryUtils.FilterGroup{
					Filters: []queryUtils.DynamicFilter{
						{Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"},
					},
				},
			},
			{
				Type:  "LEFT",
				Table: "m_kota",
				Alias: "m_kota",
				OnConditions: queryUtils.FilterGroup{
					Filters: []queryUtils.DynamicFilter{
						{Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"},
					},
				},
			},
			{
				Type:  "LEFT",
				Table: "m_kecamatan",
				Alias: "m_kecamatan",
				OnConditions: queryUtils.FilterGroup{
					Filters: []queryUtils.DynamicFilter{
						{Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"},
					},
				},
			},
			{
				Type:  "LEFT",
				Table: "m_kelurahan",
				Alias: "m_kelurahan",
				OnConditions: queryUtils.FilterGroup{
					Filters: []queryUtils.DynamicFilter{
						{Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"},
					},
				},
			},
		}

		err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &dataToDelete)
		if err != nil {
			logger.Error("Failed to fetch data for cache invalidation", map[string]interface{}{"error": err.Error(), "id": id})
		}
	}

	// Use GetSQLXDB to get database connection.
	// NOTE(review): this is the second GetSQLXDB call in this handler —
	// the connection from the prefetch above could be reused.
	dbConn, err = h.db.GetSQLXDB("postgres_simrs")
	if err != nil {
		h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError)
		return
	}
	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)
	defer cancel()

	// Use ExecuteUpdate for soft delete by changing status
	updateData := queryUtils.UpdateData{
		Columns: []string{"status", "date_updated"},
		Values:  []interface{}{"deleted", time.Now()},
	}
	filters := []queryUtils.FilterGroup{{
		Filters: []queryUtils.DynamicFilter{
			{Column: "id", Operator: queryUtils.OpEqual, Value: id},
			{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"},
		},
		LogicOp: "AND",
	}}

	// Use ExecuteUpdate instead of ExecuteDelete
	result, err := h.queryBuilder.ExecuteUpdate(ctx, dbConn, "m_pasien", updateData, filters)
	if err != nil {
		h.logAndRespondError(c, "Failed to delete pasien", err, http.StatusInternalServerError)
		return
	}

	rowsAffected, err := result.RowsAffected()
	if err != nil {
		h.logAndRespondError(c, "Failed to get affected rows", err, http.StatusInternalServerError)
		return
	}
	// Zero affected rows means the id did not match a live (non-deleted) row.
	if rowsAffected == 0 {
		h.respondError(c, "Pasien not found", sql.ErrNoRows, http.StatusNotFound)
		return
	}

	// Invalidate cache that might be affected
	// Invalidate cache for id that was deleted
	cacheKey := fmt.Sprintf("pasien:id:%s", id)
	h.cache.Delete(cacheKey)

	// Invalidate cache for data that was deleted
	if dataToDelete.ID != 0 {
		h.invalidateRelatedCache()
	}

	response := pasienModels.PasienDeleteResponse{Message: "Pasien berhasil dihapus", ID: id}
	c.JSON(http.StatusOK, response)
}
|
|
|
|
// =============================================================================
|
|
// HELPER FUNCTIONS
|
|
// =============================================================================
|
|
|
|
// invalidateRelatedCache invalidates cache that might be affected by data changes
|
|
func (h *PasienHandler) invalidateRelatedCache() {
|
|
// Invalidate cache for search that might be affected
|
|
h.cache.DeleteByPrefix("pasien:search:")
|
|
h.cache.DeleteByPrefix("pasien:dynamic:")
|
|
h.cache.DeleteByPrefix("pasien:stats:")
|
|
h.cache.DeleteByPrefix("pasien:id:")
|
|
}
|
|
|
|
// fetchPasiensDynamic executes dynamic query with timeout handling
|
|
func (h *PasienHandler) fetchPasiensDynamic(ctx context.Context, dbConn *sqlx.DB, query queryUtils.DynamicQuery) ([]pasienModels.Pasien, int, error) {
|
|
logger.Info("Starting fetchPasiensDynamic", map[string]interface{}{
|
|
"limit": query.Limit,
|
|
"offset": query.Offset,
|
|
"from": query.From,
|
|
})
|
|
|
|
var total int
|
|
var pasiens []pasienModels.Pasien
|
|
|
|
// Check if query has search
|
|
hasSearch := false
|
|
for _, filterGroup := range query.Filters {
|
|
for _, filter := range filterGroup.Filters {
|
|
if filter.Operator == queryUtils.OpILike {
|
|
hasSearch = true
|
|
break
|
|
}
|
|
}
|
|
if hasSearch {
|
|
break
|
|
}
|
|
}
|
|
|
|
logger.Info("Query analysis", map[string]interface{}{
|
|
"hasSearch": hasSearch,
|
|
"totalFilters": len(query.Filters),
|
|
})
|
|
|
|
// Optimize to prevent timeout on search queries
|
|
// Use shorter context for search and count queries
|
|
queryCtx, queryCancel := context.WithTimeout(ctx, 30*time.Second)
|
|
defer queryCancel()
|
|
|
|
// For search queries, limit maximum to prevent timeout
|
|
if hasSearch {
|
|
search := getSearchTerm(query)
|
|
logger.Info("Executing search query with timeout context", map[string]interface{}{"search_term": search})
|
|
|
|
// Limit maximum search limit to prevent timeout
|
|
maxSearchLimit := 50
|
|
if query.Limit > maxSearchLimit {
|
|
query.Limit = maxSearchLimit
|
|
logger.Info("Reduced search limit to prevent timeout", map[string]interface{}{
|
|
"original_limit": query.Limit,
|
|
"new_limit": maxSearchLimit,
|
|
})
|
|
}
|
|
|
|
// Execute search query
|
|
err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &pasiens)
|
|
if err != nil {
|
|
// Check if it's a PostgreSQL statement timeout error
|
|
if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == "57014" {
|
|
logger.Warn("Search query timed out, trying fallback strategy", map[string]interface{}{
|
|
"search_term": search,
|
|
})
|
|
|
|
// Fallback: Search only in the most relevant column
|
|
// We need to rebuild the filters for the fallback
|
|
var fallbackFilters []queryUtils.FilterGroup
|
|
// Add other non-search filters back (e.g., status)
|
|
for _, fg := range query.Filters {
|
|
if fg.LogicOp == "AND" {
|
|
fallbackFilters = append(fallbackFilters, fg)
|
|
}
|
|
}
|
|
// Add the single, more specific search filter
|
|
searchableColumns := []string{
|
|
"nomr",
|
|
"title",
|
|
"nama",
|
|
"no_kartu",
|
|
}
|
|
if len(searchableColumns) > 0 {
|
|
fallbackFilters = append([]queryUtils.FilterGroup{{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: searchableColumns[0], Operator: queryUtils.OpILike, Value: "%" + search + "%"},
|
|
},
|
|
LogicOp: "AND",
|
|
}}, fallbackFilters...)
|
|
|
|
fallbackQuery := query
|
|
fallbackQuery.Filters = fallbackFilters
|
|
|
|
// Execute the fallback query with a shorter timeout
|
|
fallbackCtx, fallbackCancel := context.WithTimeout(ctx, 10*time.Second)
|
|
defer fallbackCancel()
|
|
|
|
err = h.queryBuilder.ExecuteQuery(fallbackCtx, dbConn, fallbackQuery, &pasiens)
|
|
if err != nil {
|
|
logger.Error("Fallback search query also failed", map[string]interface{}{
|
|
"error": err.Error(),
|
|
"query": fallbackQuery,
|
|
})
|
|
// Return a more user-friendly error
|
|
return nil, 0, fmt.Errorf("search timed out. The search term '%s' is too general. Please try a more specific term", search)
|
|
}
|
|
logger.Info("Fallback search query successful", map[string]interface{}{
|
|
"recordsFetched": len(pasiens),
|
|
})
|
|
}
|
|
} else {
|
|
// It's a different error, handle it as before
|
|
logger.Error("Failed to execute search query", map[string]interface{}{
|
|
"error": err.Error(),
|
|
"query": query,
|
|
})
|
|
return nil, 0, fmt.Errorf("failed to execute search query: %w", err)
|
|
}
|
|
}
|
|
|
|
// Estimate total for search query (don't count exact for performance)
|
|
total = len(pasiens)
|
|
if len(pasiens) == query.Limit {
|
|
// If reached limit, estimate there are more data
|
|
total = query.Offset + query.Limit + 100
|
|
} else {
|
|
total = query.Offset + len(pasiens)
|
|
}
|
|
} else {
|
|
logger.Info("Executing regular query without search")
|
|
|
|
// For queries without search, count total with shorter timeout
|
|
countCtx, countCancel := context.WithTimeout(ctx, 15*time.Second)
|
|
defer countCancel()
|
|
|
|
count, err := h.queryBuilder.ExecuteCount(countCtx, dbConn, query)
|
|
if err != nil {
|
|
// If count failed, fallback to estimation or return error
|
|
logger.Warn("Failed to get exact count, using estimation", map[string]interface{}{"error": err.Error()})
|
|
// For queries without search, we can estimate based on limit
|
|
total = query.Offset + query.Limit + 100 // Conservative estimation
|
|
} else {
|
|
total = int(count)
|
|
}
|
|
|
|
logger.Info("Count query successful", map[string]interface{}{
|
|
"count": total,
|
|
})
|
|
|
|
// Execute main data query
|
|
err = h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &pasiens)
|
|
if err != nil {
|
|
logger.Error("Failed to execute main query", map[string]interface{}{
|
|
"error": err.Error(),
|
|
"query": query,
|
|
})
|
|
return nil, 0, fmt.Errorf("failed to execute main query: %w", err)
|
|
}
|
|
|
|
logger.Info("Data query successful", map[string]interface{}{
|
|
"recordsFetched": len(pasiens),
|
|
})
|
|
}
|
|
|
|
logger.Info("Query execution completed", map[string]interface{}{
|
|
"totalRecords": total,
|
|
"returnedRecords": len(pasiens),
|
|
"hasSearch": hasSearch,
|
|
})
|
|
|
|
return pasiens, total, nil
|
|
}
|
|
|
|
// getSearchTerm extracts the search term from a DynamicQuery object.
|
|
// It assumes the search is the first filter group with an "OR" logic operator.
|
|
func getSearchTerm(query queryUtils.DynamicQuery) string {
|
|
for _, filterGroup := range query.Filters {
|
|
if filterGroup.LogicOp == "OR" && len(filterGroup.Filters) > 0 {
|
|
if valueStr, ok := filterGroup.Filters[0].Value.(string); ok {
|
|
return strings.Trim(valueStr, "%")
|
|
}
|
|
}
|
|
}
|
|
return ""
|
|
}
|
|
|
|
// getAggregateData gets comprehensive statistics about pasien data
|
|
func (h *PasienHandler) getAggregateData(ctx context.Context, dbConn *sqlx.DB, filterGroups []queryUtils.FilterGroup) (*models.AggregateData, error) {
|
|
aggregate := &models.AggregateData{
|
|
ByStatus: make(map[string]int),
|
|
}
|
|
|
|
var wg sync.WaitGroup
|
|
var mu sync.Mutex
|
|
errChan := make(chan error, 4)
|
|
|
|
// 1. Count by status
|
|
wg.Add(1)
|
|
go func() {
|
|
defer wg.Done()
|
|
// Use context with shorter timeout
|
|
queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second)
|
|
defer queryCancel()
|
|
|
|
query := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{
|
|
{Expression: "status"},
|
|
{Expression: "COUNT(*)", Alias: "count"},
|
|
},
|
|
Filters: filterGroups,
|
|
GroupBy: []string{"status"},
|
|
}
|
|
var results []struct {
|
|
Status string `db:"status"`
|
|
Count int `db:"count"`
|
|
}
|
|
err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &results)
|
|
if err != nil {
|
|
errChan <- fmt.Errorf("status query failed: %w", err)
|
|
return
|
|
}
|
|
mu.Lock()
|
|
for _, result := range results {
|
|
aggregate.ByStatus[result.Status] = result.Count
|
|
switch result.Status {
|
|
case "active":
|
|
aggregate.TotalActive = result.Count
|
|
case "draft":
|
|
aggregate.TotalDraft = result.Count
|
|
case "inactive":
|
|
aggregate.TotalInactive = result.Count
|
|
}
|
|
}
|
|
mu.Unlock()
|
|
}()
|
|
|
|
// 4. Get last updated and today's stats
|
|
wg.Add(1)
|
|
go func() {
|
|
defer wg.Done()
|
|
// Use context with shorter timeout
|
|
queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second)
|
|
defer queryCancel()
|
|
|
|
// Last updated
|
|
query1 := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{{Expression: "MAX(date_updated)"}},
|
|
Filters: filterGroups,
|
|
}
|
|
var lastUpdated sql.NullTime
|
|
err := h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, query1, &lastUpdated)
|
|
if err != nil {
|
|
errChan <- fmt.Errorf("last updated query failed: %w", err)
|
|
return
|
|
}
|
|
|
|
// Using QueryBuilder for today's statistics
|
|
today := time.Now().Format("2006-01-02")
|
|
|
|
// Query for created_today
|
|
createdTodayQuery := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{
|
|
{Expression: "COUNT(*)", Alias: "count"},
|
|
},
|
|
Filters: append(filterGroups, queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "DATE(date_created)", Operator: queryUtils.OpEqual, Value: today},
|
|
},
|
|
LogicOp: "AND",
|
|
}),
|
|
}
|
|
|
|
var createdToday int
|
|
err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, createdTodayQuery, &createdToday)
|
|
if err != nil {
|
|
errChan <- fmt.Errorf("created today query failed: %w", err)
|
|
return
|
|
}
|
|
|
|
// Query for updated_today (updated today but not created today)
|
|
updatedTodayQuery := queryUtils.DynamicQuery{
|
|
From: "m_pasien",
|
|
Fields: []queryUtils.SelectField{
|
|
{Expression: "COUNT(*)", Alias: "count"},
|
|
},
|
|
Filters: append(filterGroups, queryUtils.FilterGroup{
|
|
Filters: []queryUtils.DynamicFilter{
|
|
{Column: "DATE(date_updated)", Operator: queryUtils.OpEqual, Value: today},
|
|
{Column: "DATE(date_created)", Operator: queryUtils.OpNotEqual, Value: today},
|
|
},
|
|
LogicOp: "AND",
|
|
}),
|
|
}
|
|
|
|
var updatedToday int
|
|
err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, updatedTodayQuery, &updatedToday)
|
|
if err != nil {
|
|
errChan <- fmt.Errorf("updated today query failed: %w", err)
|
|
return
|
|
}
|
|
|
|
mu.Lock()
|
|
if lastUpdated.Valid {
|
|
aggregate.LastUpdated = &lastUpdated.Time
|
|
}
|
|
aggregate.CreatedToday = createdToday
|
|
aggregate.UpdatedToday = updatedToday
|
|
mu.Unlock()
|
|
}()
|
|
|
|
wg.Wait()
|
|
close(errChan)
|
|
|
|
for err := range errChan {
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
}
|
|
|
|
return aggregate, nil
|
|
}
|
|
|
|
// logAndRespondError logs an error and sends a JSON response
|
|
func (h *PasienHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) {
|
|
logger.Error(message, map[string]interface{}{"error": err.Error(), "status_code": statusCode})
|
|
h.respondError(c, message, err, statusCode)
|
|
}
|
|
|
|
// respondError sends a standardized JSON error response
|
|
func (h *PasienHandler) respondError(c *gin.Context, message string, err error, statusCode int) {
|
|
errorMessage := message
|
|
if gin.Mode() == gin.ReleaseMode {
|
|
errorMessage = "Internal server error"
|
|
}
|
|
c.JSON(statusCode, models.ErrorResponse{Error: errorMessage, Code: statusCode, Message: err.Error(), Timestamp: time.Now()})
|
|
}
|
|
|
|
// calculateMeta creates pagination metadata
|
|
func (h *PasienHandler) calculateMeta(limit, offset, total int) models.MetaResponse {
|
|
totalPages, currentPage := 0, 1
|
|
if limit > 0 {
|
|
totalPages = (total + limit - 1) / limit
|
|
currentPage = (offset / limit) + 1
|
|
}
|
|
return models.MetaResponse{
|
|
Limit: limit, Offset: offset, Total: total, TotalPages: totalPages,
|
|
CurrentPage: currentPage, HasNext: offset+limit < total, HasPrev: offset > 0,
|
|
}
|
|
}
|